In [1]:
# Parameters
SWARM_PATH3 = "/home/kamran.karimi1/Project/git/Swarm/dataset/RidgeCrest/catalog_fmsearch.csv"
n_thresh = 0.01
SWARM_PATH4 = "/home/kamran.karimi1/Project/git/Swarm/dataset/RidgeCrest/momentTensors.pkl"

fetch the data: Shelly 2020

In [2]:
#--- extract the catalog (new)

import pandas as pd
#import matplotlib; matplotlib.use('agg')
import sys
from matplotlib import rc
import matplotlib
import matplotlib.pylab as plt
import numpy as np
from matplotlib import patches
import warnings
import matplotlib.ticker
import matplotlib as mpl
#from mpl_toolkits.mplot3d import Axes3D
import datetime
#import path
from math import *
import matplotlib.dates as mdates
from matplotlib import dates
import matplotlib.dates as md
import matplotlib.cm as cm
from matplotlib.font_manager import FontProperties
import itertools
import traceback
from scipy.ndimage import gaussian_filter
import math

def DrawFrame(ax, (alpha_xl,alpha_xr),(alpha_yb,alpha_yt),linewidth,LOG_X=None,LOG_Y=None):
    [xlo,xhi,ylo,yhi] = ax.axis()
    if LOG_X:
        [xlo,xhi,junk,junk] = np.log10(ax.axis())
    if LOG_Y:
        [junk,junk,ylo,yhi] = np.log10(ax.axis())
    lx = xhi - xlo
    ly = yhi - ylo
    xy = [xlo - alpha_xl * lx, ylo - alpha_yb * ly]
    height = ly*(1+alpha_yb+alpha_yt)
    width = lx*(1+alpha_xl+alpha_xr)
    xy_end=[xy[0]+width,xy[1]+height]
    if LOG_X:
        xy[0] = 10 ** xy[0]
        xy_end[0] = 10 ** xy_end[0]
    if LOG_Y:
        xy[1] = 10 ** xy[1]
        xy_end[1] = 10 ** xy_end[1]
    ax.add_patch( patches.Rectangle(xy=xy, width=xy_end[0]-xy[0], 
                                    height=xy_end[1]-xy[1], linewidth=linewidth,
                                    clip_on=False,facecolor=None,edgecolor='black',fill=None) ) 
    
#--- add a new time attribute
def ConvertTime( df_in ):
    df=df_in.copy()
    df.insert(0,'date',pd.to_datetime(swarm[['year', 'month', 'day', 'hour', 'minute', 'second']]))
    df.drop(['year', 'month', 'day', 'hour', 'minute', 'second'],axis=1,inplace=True)
    return df

# #--- set path
# SWARM_PATH = './dataset/RidgeCrest/DataS1_noXYZ.txt' #--- comment if passed by argument
# DIR_OUTPT = '.'
# DIR_OUTPT_figs = '.' 
# #--- store
# swarm = pd.read_csv( SWARM_PATH, sep = ',' ) #--- parse data

# swarm = ConvertTime( swarm ) #--- add new column 'date'

# #--- sort based on time
# swarm.sort_values(by=['date'],inplace=True)

# #--- reindex
# swarm.reset_index(inplace=True,drop=True)

# #--- rename cols
# swarm = swarm.rename(index=str,columns={'lat':'latitude','lon':'longitude','mag':'magnitude'})

# swarm.head()
In [3]:
mpl.rcParams.update(mpl.rcParamsDefault)
warnings.filterwarnings('ignore') #--- get rid of warnings

rc('text', usetex=True)
font = {'size'   : 20}
matplotlib.rc('font', **font)
In [4]:
    
# #--- add a new time attribute
# def ConvertTime( df_in ):
    
#     df=df_in.copy()
#     df['date']=pd.to_datetime(df['Event ID'] +' ' + df['Origin Time'])
#     df['Event ID']=df.index
# #    df.insert(0,'date',pd.to_datetime(swarm[['year', 'month', 'day', 'hour', 'minute', 'second']]))
#     df.drop(['Origin Time'],axis=1,inplace=True)
#     return df

# #--- set path
# SWARM_PATH = './dataset/RidgeCrest/Dataset S2.txt' #--- comment if passed by command line
# DIR_OUTPT = '.' #'./dataset/RidgeCrest' #sys.argv[2]
# DIR_OUTPT_figs = '.' #'./dataset/figs' #'/Users/Home/Desktop/Tmp/txt' #'/Users/Home/Dropbox/Documents/papers/microSeismicityOklahoma/figs'

# #--- store
# swarm = pd.read_csv( SWARM_PATH, sep = ',' ) #--- parse data

# swarm = ConvertTime( swarm ) #--- add new column 'date'

# #--- sort based on time
# swarm.sort_values(by=['date'],inplace=True)

# #--- reindex
# swarm.reset_index(inplace=True,drop=True)

# #--- rename cols
# swarm = swarm.rename(index=str,columns={'Latitude':'latitude','Longitude':'longitude',
#                                         'Magnitude (SCEDC preferred)':'magnitude','Depth (km)':'depth'})

# swarm.head()
In [5]:
#--- remove nan

#swarm = swarm.dropna(subset=['magnitude'])
#swarm.info()

Moment tensors scedc

In [6]:
    
# #--- add a new time attribute
# def ConvertTime( df_in ):
    
#     df=df_in.copy()    
#     df['date']=pd.to_datetime(df['Date']+' '+df['Time(UTC)'])
# #    df['date']=pd.to_datetime(df['Event ID'] +' ' + df['Origin Time'])
# #    df['Event ID']=df.index
# #    df.insert(0,'date',pd.to_datetime(df_moment[['year', 'month', 'day', 'hour', 'minute', 'second']]))
#     df.drop(['Time(UTC)','(Ml)','Mp','originalDepth','Solutions','VR','Date'],axis=1,inplace=True)
#     return df

# #--- set path
# SWARM_PATH2 = './dataset/RidgeCrest/momentTensor.csv' #--- comment if passed by command line

# #--- store
# df_moment = pd.read_csv( SWARM_PATH2, sep = ',' ) #--- parse data

# df_moment = ConvertTime( df_moment ) #--- add new column 'date'

# #--- sort based on time
# df_moment.sort_values(by=['date'],inplace=True)

# #--- reindex
# df_moment.reset_index(inplace=True,drop=True)

# #--- rename cols
# df_moment = df_moment.rename(index=str,columns={'EventID':'Event ID','Lat.':'latitude',
#                                                 'Long.':'longitude','Mw':'magnitude'})

# df_moment.head()

fetch tensors from the html files

  • use event id's from the above catalog
In [7]:
# azimuth_t=[]
# azimuth_n=[]
# azimuth_p=[]
# plunge_t = []
# plunge_n = []
# plunge_p = []
# mt = []
# mn = []
# mp = []
# for items in df_moment.itertuples():
#     if 1: #try:
#         eventID = items[1]
#         html_url='https://service.scedc.caltech.edu/MomentTensor/solutions/web_%s/ci%s_MT.html'%(eventID,eventID)
# #        print html_url
#         dff=pd.read_html(html_url)
#         dff=dff[2] #--- eigenvalues and eigenmodes
        
# #        if eventID not in list(swarm['Event ID']):
# #            continue
# #         #--- stress catalog
# # #        if eventID == 38636511:
# #         print eventID, df_st[df_st['Event ID']==eventID]['Origin Time'].iloc[0]-items[4]
# #            print df_st[df_st['Event ID']==eventID]['Origin Time'].iloc[0],items[4]

# #         #--- parse moment tensor
#         azimuth_t.append(float(dff.iloc[2][3]))
#         azimuth_n.append(float(dff.iloc[3][3]))
#         azimuth_p.append(float(dff.iloc[4][3]))
#         plunge_t.append(float(dff.iloc[2][2]))
#         plunge_n.append(float(dff.iloc[3][2]))
#         plunge_p.append(float(dff.iloc[4][2]))
    
    
#         mt.append(float(dff.iloc[2][1]))
#         mn.append(float(dff.iloc[3][1]))
#         mp.append(float(dff.iloc[4][1]))
#         assert float(dff.iloc[2][1]) > 0.0
#         assert float(dff.iloc[4][1]) < 0.0

insert as columns

In [8]:
# df_moment.insert(len(df_moment.keys()),'mt',mt)
# df_moment.insert(len(df_moment.keys()),'Azimuth(T)',azimuth_t)
# df_moment.insert(len(df_moment.keys()),'Plunge(T)',plunge_t)

# df_moment.insert(len(df_moment.keys()),'mn',mn)
# df_moment.insert(len(df_moment.keys()),'Azimuth(N)',azimuth_n)
# df_moment.insert(len(df_moment.keys()),'Plunge(N)',plunge_n)

# df_moment.insert(len(df_moment.keys()),'mp',mp)
# df_moment.insert(len(df_moment.keys()),'Azimuth(P)',azimuth_p)
# df_moment.insert(len(df_moment.keys()),'Plunge(P)',plunge_p)

common events between the two catalogs

In [9]:
# df_moment.index=df_moment['Event ID']
# common_elm = list(set(df_moment['Event ID']) & set(swarm['Event ID']))
# df_moment = df_moment.loc[common_elm]
# df_moment.reset_index(drop=True, inplace=True)
# df_moment.head()

moment tensors scedc

Column legend:

EVID : Event Id
ET : Earthquake Type
MAG : Event Magnitude
M : Magnitude Type
LAT : Latitude
LON : Longitude
DEPTH : Earthquake Depth
Q : Quality
NPH : Number of phases or arrivals
WRMS : Weighted RMS
ERHOR : Horizontal Error (location uncertainty that accompanies the location)
ERDEP : Depth Error (standard deviation of depth estimate)
ERTIME : Origin Time Error (standard deviation of origin time)
STRIKE : Strike of fault plane
DIP : Dip of fault plane
RAKE : Rake of fault plane
FPUC : Fault Plane Uncertainty in degrees
APUC : Auxialiary Plane Uncertainty in degrees
NPPL : Number of P Polarities
MFRAC : Fraction Misfit Polarities
FMQ : Focal Mechanism Quality
PROB : Probability true mechanism is "close" to preferred solution
STDR : Station Distribution
NSPR : Number of S/P Ratios
MAVG : Average log10(S/P) Misfit

In [10]:
    
#--- add a new time attribute
def ConvertTime( df_in ):
    
    df=df_in.copy()
#    display(df.head())
    df.insert(loc=2, column='date', value=pd.to_datetime(df['#YYY/MM/DD']+' '+df['HH:mm:SS.ss']))
#    df['date']=pd.to_datetime(df['Event ID'] +' ' + df['Origin Time'])
#    df['Event ID']=df.index
#    df.insert(0,'date',pd.to_datetime(df_moment[['year', 'month', 'day', 'hour', 'minute', 'second']]))
    df.drop(['#YYY/MM/DD','HH:mm:SS.ss', u'ET', u'GT',u'M', u'Q', u'NPH', u'WRMS', u'ERHOR', u'ERDEP',
       u'ERTIME', u'STRIKE', u'DIP', u'RAKE', u'FPUC', u'APUC', u'NPPL',
       u'MFRAC', u'FMQ', u'PROB', u'STDR', u'NSPR', u'MAVG'],axis=1,inplace=True)
    return df

#--- set path
#SWARM_PATH3 = './dataset/RidgeCrest/catalog_fmsearch.csv' #--- comment if passed by command line
DIR_OUTPT = '.'
DIR_OUTPT_figs = '.' 

#--- store
df_moment = pd.read_csv( SWARM_PATH3, sep = ',' ) #--- parse data

df_moment = ConvertTime( df_moment ) #--- add new column 'date'

#--- sort based on time
df_moment.sort_values(by=['date'],inplace=True)

#--- reindex
df_moment.reset_index(inplace=True,drop=True)

#--- rename cols
df_moment = df_moment.rename(index=str,columns={'EVID':'Event ID','LAT':'latitude',
                                                'LON':'longitude','MAG':'magnitude', 'DEPTH':'depth'})

#df_moment = df_moment.iloc[:32]
df_moment.head()
#df_moment.keys()
Out[10]:
Event ID date magnitude latitude longitude depth
0 38443095 2019-07-04 17:02:55.340 3.98 35.708 -117.504 10.6
1 38443111 2019-07-04 17:09:20.090 2.49 35.708 -117.501 11.0
2 38443127 2019-07-04 17:12:14.970 2.13 35.706 -117.503 10.5
3 38443183 2019-07-04 17:33:49.000 6.40 35.705 -117.504 10.5
4 38443199 2019-07-04 17:37:55.500 3.86 35.746 -117.552 8.3
In [11]:
df_moment.info()
<class 'pandas.core.frame.DataFrame'>
Index: 8823 entries, 0 to 8822
Data columns (total 6 columns):
Event ID     8823 non-null int64
date         8823 non-null datetime64[ns]
magnitude    8823 non-null float64
latitude     8823 non-null float64
longitude    8823 non-null float64
depth        8823 non-null float64
dtypes: datetime64[ns](1), float64(4), int64(1)
memory usage: 482.5+ KB

fetch angles

In [12]:
# def func(x):
#     try:
#         html_url = x
#         dff=pd.read_html(html_url)
#         dff=dff[3] #--- eigenvalues and eigenmodes
#         #--- parse moment tensor
#         assert dff.iloc[0]['Axis'] == 'P'
#         assert dff.iloc[1]['Axis'] == 'T'
#         assert dff.iloc[1]['Azimuth'][-4:-1] == '&de'
#         assert dff.iloc[0]['Azimuth'][-4:-1] == '&de'
#         assert dff.iloc[1]['Plunge'][-4:-1] == '&de'
#         assert dff.iloc[0]['Plunge'][-4:-1] == '&de'
#         azimuth_t = float(dff.iloc[1]['Azimuth'][:-4])
#         azimuth_p = float(dff.iloc[0]['Azimuth'][:-4])
#         plunge_t  = float(dff.iloc[1]['Plunge'][:-4])
#         plunge_p  = float(dff.iloc[0]['Plunge'][:-4])
#         return (azimuth_t,plunge_t,azimuth_p,plunge_p)        
#     except:
#         return (None, None, None, None )
# df_moment['html']=df_moment['Event ID'].apply(lambda x: 'https://service.scedc.caltech.edu/FocMech/ci%s.cifm1.html'%(x))
# s_series = df_moment['html'].apply(func)
# df_moment['Azimuth(T)'] = s_series.apply(lambda x: x[0])
# df_moment['Plunge(T)'] = s_series.apply(lambda x: x[1])
# df_moment['Azimuth(P)'] = s_series.apply(lambda x: x[2])
# df_moment['Plunge(P)'] = s_series.apply(lambda x: x[3])

# df_moment.drop(['html'],axis=1,inplace=True)
In [13]:
df_moment.head() 
Out[13]:
Event ID date magnitude latitude longitude depth
0 38443095 2019-07-04 17:02:55.340 3.98 35.708 -117.504 10.6
1 38443111 2019-07-04 17:09:20.090 2.49 35.708 -117.501 11.0
2 38443127 2019-07-04 17:12:14.970 2.13 35.706 -117.503 10.5
3 38443183 2019-07-04 17:33:49.000 6.40 35.705 -117.504 10.5
4 38443199 2019-07-04 17:37:55.500 3.86 35.746 -117.552 8.3

save data frame

In [14]:
#SWARM_PATH4 = './dataset/RidgeCrest/momentTensors.pkl' #--- comment if passed by argument

#df_moment.to_pickle('./dataset/RidgeCrest/momentTensors.pkl')
swarm=pd.read_pickle(SWARM_PATH4)
In [15]:
#--- plot spatial map 
#--- fault map california: temporal evolution of events


def DataFrameSubSet( df, column, (xmin,xmax) ):
    return df[ ( df[column] >= xmin ) & 
               ( df[column] < xmax ) ]

#--- subset of data
swarm_lohi = swarm.copy()#DataFrameSubSet( swarm, 
                          #   'date', 
                           #  ( pd.to_datetime('2010-04-04'), pd.to_datetime('2010-06-26') ) )

swarm_lohi.plot.scatter('longitude','latitude',
                        s=3**(swarm_lohi['magnitude']),
                        c='date',cmap='jet',
                        alpha=0.1,figsize=(5,4)) #--- plot

# swarm_lohi.plot.scatter('longitude','depth',
#                         s=3**(swarm_lohi['magnitude']),
#                         c='date',cmap='jet',
#                         alpha=0.1,figsize=(5,4)) #--- plot

# swarm_lohi.plot.scatter('latitude','depth',
#                         s=3**(swarm_lohi['magnitude']),
#                         c='date',cmap='jet',
#                         alpha=0.1,figsize=(5,4)) #--- plot

plt.show()
#plt.savefig('%s/map.png'%DIR_OUTPT_figs,dpi=150,bbox_inches='tight')

#plt.figure(figsize=(6,6)).gca(projection='3d')
#plt.xlabel('Long')
#plt.ylabel('Lat')
#plt.scatter(swarm_lohi['longitude'],
 #           swarm_lohi['latitude'],
  #          swarm_lohi['depth']) #s=10*swarm['magnitude'],cmap='jet') #--- plot

make movies

In [16]:
# import os

# class makeDATA:
#     def __init__(self, write, read = None ):
#         self.wirite = write
#         self.read = read   # embedded object: attrs (coord, lohi, type, vel)

#     def __del__( self ):
#         self.wirite.close()

#     def Map(self, coord, comnt, *args ):
#          nAtom = len(coord)
#          self.wirite.write('%s\n%s\n'%(nAtom,comnt))
#          for i in coord:
#              ndime = len( coord[i] )
#              self.wirite.write('%s\t'% i)
#  #           self.wirite.write('%d\t'% type[i])
#              for idime in xrange( ndime ):
#                  self.wirite.write('%e\t'%coord[i][idime] )
#              if args:
#                  for dics in args:
#  #                   if type( dics[ i ] == type( 1 ) ):
#  #                       self.wirite.write( '%d\t' % dics[ i ] )
#  #                   else:
#                      VAL = 0.0 if abs(dics[ i ])<1.0e-15 else dics[ i ]
#                      self.wirite.write( '%s\t' % VAL )
#                  self.wirite.write( '\n' )
#              else:
#                  self.wirite.write( '\n' )
#          self.wirite.flush()
#          self.wirite.close()

# swarm_lohi.sort_values(by='date',inplace=True)            
# coord_length=swarm_lohi['longitude'].apply(lambda x:[x])+swarm_lohi['latitude'].apply(lambda x:[x])
# Magnitude = np.exp(swarm_lohi['magnitude'])/20000

# makeDATA( open( '%s/map.xyz'%DIR_OUTPT, 'w' ), None ).Map( coord_length.to_dict(), 'ITIME=%s'%0, Magnitude.to_dict()) #--- instantanuous
# EVENT_ID = Magnitude.keys()
# EVENT_ID = map(int,EVENT_ID)
# EVENT_ID.sort()
# cords={}
# mag={}
# t0 = None
# os.system('rm %s/temporalMap.xyz'%DIR_OUTPT)
# for j_event,indexx in zip(EVENT_ID,xrange(10000)):
# #    print j_event
#     t = swarm_lohi['date'].iloc[ j_event ]
#     if not t0:
#         t0 = t
#     assert t >= t0, 't0=%s,t=%s'%( t0, t )
#     #--- temporal map 
#     cords[ j_event ] = coord_length[ j_event ]
#     mag[ j_event ] = Magnitude[ j_event ]
#     itime=swarm_lohi[ 'date' ].iloc[ j_event ]
#     tstr='%s-%s'%(itime.strftime("%x"), itime.strftime("%X"))
#     makeDATA( open( '%s/temporalMap.xyz'%DIR_OUTPT, 'a' ), None ).Map( cords, 'ITIME=%s'%itime, mag ) #--- instantanuous
#     t0 = t
    
    
    
In [17]:
# #--- plot timeseries


class Tlohi:
     def __init__(self,lo,hi):
        self.lo = lo
        self.hi = hi

# fig=plt.figure(figsize=(8,4))
# ax=fig.add_subplot(111)
# plt.xlabel('Time')
# plt.ylabel('M')

# plt.ylim(swarm['magnitude'].min(),swarm['magnitude'].max())

# tt=swarm[swarm['magnitude']==swarm['magnitude'].sort_values(ascending=False).iloc[3]]['date']
# plt.xlim(tt-datetime.timedelta(days=30),tt+datetime.timedelta(days=30))# pd.to_datetime('2014-12-04'),pd.to_datetime('2015-06-1'))
# #plt.xlim(swarm['date'].min(),pd.to_datetime('2010-07-20'))
# plt.scatter(swarm['date'],swarm['magnitude'],
#             s=2*np.exp(swarm['magnitude']),
#             alpha=0.2)

# #myTlohi = Tlohi( pd.to_datetime('2010-04-08'), 
# #                 pd.to_datetime('2010-07-01'))

# #plt.plot([myTlohi.lo,myTlohi.lo],
# #        [-2,7],'r-')

# #plt.plot([myTlohi.hi,myTlohi.hi],
# #        [-2,7],'r-')
# fig.autofmt_xdate()
# plt.savefig('%s/timeSeries.png'%DIR_OUTPT_figs,dpi=150,bbox_inches='tight')
# plt.show()
In [18]:
#--- freq-magnitude relation

import sys 

def histogramACCUMLTD( slist ):
    assert type( slist ) == type( [] ), 'arg must be a list. a %s is given!' %( type( slist ) )
    d = {}
    for item in slist:
        try:
            d[ item ] += 1
        except:
            d[ item ] = 1
    keys = d.keys()
    keys.sort()

    cdf = 0.0
    xi = min( slist ) - 1.0e-6
    xf = max( slist ) + 1.0e-6
    npoin = len( slist )
    adict = {}
    for ikey, index in zip( keys, xrange( sys.maxint ) ):
        adict[ index ] = [ xi, ikey, cdf ]
        cdf += 1.0 * d[ ikey ] # / npoin
        xi = ikey
    adict[ index + 1 ] = [ xi, xf, cdf ]
    return adict





#--- set min/max time to avoid temporal incompletenesss issue
swarm_copy = swarm.copy()#DataFrameSubSet( swarm, 
                          #   'date', 
                           #  ( myTlohi.lo, 
                            #   myTlohi.hi ) )
#--- accumulated histogram
N = len(swarm_copy['magnitude'])
slist=np.array(swarm_copy['magnitude'])
slist.sort()
d = histogramACCUMLTD( slist.tolist() )
keys=d.keys()
keys.sort()

#--- plot
fig= plt.figure(figsize=(3,3))#,dpi=150)
ax = fig.add_subplot(111)

ax.axis([-1,6,.9,1e5])
ax.set_yscale('log')

#--- add major xticks
xmin=np.ceil(ax.axis()[0])
xmax=np.floor(ax.axis()[1])
nbin = xmax - xmin
ax.set_xticks(np.linspace(ax.axis()[0],ax.axis()[1],int(nbin)+1))

#--- add major yticks
ymin=np.ceil(np.log10(ax.axis()[2]))
ymax=np.floor(np.log10(ax.axis()[3]))
nbin = ymax - ymin
ax.set_yticks(np.logspace(ymin,ymax,int(nbin)+1))

#--- put minor bins
#locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
#ax.xaxis.set_minor_locator(locmin)
#ax.xaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())

#--- put minor bins y
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.yaxis.set_minor_locator(locmin)
ax.yaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())

ax.tick_params(axis='y',left=True, right=True,which='both')
ax.tick_params(axis='x',bottom=True, top=True,which='both')


xmin=swarm_copy['magnitude'].min()
xmax=swarm_copy['magnitude'].max()
dx=0.1
junk = ax.hist( swarm_copy['magnitude'],
                bins=int((xmax-xmin)/dx),
               label='histogram',color='silver') #--- histogram
xx=[];yy=[]
for ikey in keys:
    xx.append(d[ikey][0])
    yy.append(N-d[ikey][2])
ax.plot(xx,yy,
            linestyle='-', drawstyle='steps-post',color='black',
             linewidth=1.0) #--- accumulated
    
b=0.8
ax.plot([2.5,6],[1e4, 1e4* 10**(-b*(6-2.5))],'r-.',linewidth=1)

DrawFrame(ax, (0.2,0.06),(0.32,0.1),0.01,LOG_Y=True) 

#plt.savefig('%s/gr.png'%DIR_OUTPT_figs,dpi=150,bbox_inches='tight')#,transparent=True)
plt.show()
In [19]:
#--- estimate b by varying the mc value

from math import *
import scipy.stats

#--- set min/max time to avoid temporal incompletenesss issue
swarm_copy = swarm.copy()#DataFrameSubSet( swarm.copy()#, 
                          #   'date', #
                           #  ( myTlohi.#lo, 
                            #   myTlohi.hi# ) )

#--- plot distributions (Nm)
fig = plt.figure(figsize=(12,4))
ax=fig.add_subplot(131)
ax.set_xlabel('$m$')
ax.set_ylabel('$N(mag.>m)$')
xmin = np.floor(swarm_copy['magnitude'].min())
xmax = np.ceil(swarm_copy['magnitude'].max())
ax.set_xticks(np.linspace(xmin,xmax,int(xmax-xmin)/1+1))
ax.set_xlim(xmin,xmax)
#plt.ylim(1,1e5)
ax.set_yscale('log')
for ikey in keys:
    ax.plot([d[ikey][0],d[ikey][1]],
             [N-d[ikey][2],N-d[ikey][2]],
             '-',color='black') #--- accumulated
junk = ax.hist( swarm_copy['magnitude'],
                bins=64,
               label='histogram') #--- histogram

#--- list of magnitudes
mm_list = swarm_copy['magnitude'].to_list()
mm_list = np.sort(mm_list)

#--- b vs mc
ax2=fig.add_subplot(132)
ax2.set_xticks(np.linspace(-1.0,3.0,5))
#ax2.set_ylim(0,1.8)
#ax4 = ax2.twinx()
#ax4.set_yscale('log')
#ax4.set_ylim(1e-16,)
#plt.xlim(0.0,1.5)
ax2.set_xlabel('$m_c$')
ax2.set_ylabel('$b$')
#ax4.set_ylabel(r'$p_{val}$',labelpad=-70)

#--- bvalue bhatacharya eq. 5
DM = 0.1
n = 30
#--- 
MC = []
B = []
Err_B = []
pval = []
for mc_new in [0.05+i*0.1 for i in xrange(n)]: #--- m values are quantized (+- 0.1)!
    mlist = swarm_copy[ swarm_copy['magnitude']-mc_new>=0.0]['magnitude'] 
    m_mean=mlist.mean()
    assert m_mean-mc_new > 0.0, 'm_mean=%s,mc=%s'%( m_mean, mc )
    k_mean = (m_mean-mc_new)/DM
    b_value = log(np.exp(1),10)*log(1+DM/(m_mean-mc_new))/DM
    nsamp = len(mlist)
#    err_b = 1.96*(b_value/nsamp/log(10,exp(1.0)))**0.5
    err_b = 2*(k_mean*(k_mean+1)/nsamp)**0.5/log(10,exp(1.0)) #--- Guttorp and Hopkins (1986) eq.(4)

    
    
#     #--- bvalue bhatacharya eq. 6
#     #--- accumulated histogram
#     slist = mm_list[ mm_list>=mc_new ]
#     slist.sort()
#     NN = len(slist)
#     cdf = histogramACCUMLTD( slist.tolist() )
#     #--- bvalue bhatacharya eq. 6
#     n_cdf=len(cdf)-1
#     bi = []
#     for i in xrange(n_cdf):
#         mi = cdf[ i ][ 0 ]
#         Ni = NN - cdf[ i ][ 2 ]
#         bij = []
#         for j in xrange(n_cdf):
#             mj = cdf[ j ][ 0 ]
#             Nj = NN - cdf[ j ][ 2 ]
#             if i == j: continue
#             slope = ( log( Nj, 10 ) - log( Ni, 10 ) ) / ( mj - mi )
#             assert not np.isnan(slope)
#             bij.append( slope )
#         bi.append( np.median( bij ) )
#     bval_2nd = -np.median( bi )
#     err_b2nd = np.std(bi)/(len(bi))**0.5
    
    #--- plot b vs mc
    ax2.errorbar([mc_new],[b_value],yerr=err_b,marker='o',color='black')
#    ax2.errorbar([mc_new],[bval_2nd],yerr=err_b2nd, marker='o',color='red')
    
    #---
    MC.append(mc_new)
    B.append(b_value)
    Err_B.append(err_b)

    #--- p-value (by hand)
#    (dmax,pval2nd) = scipy.stats.kstest( mlist, lambda x: 1.0-10**(-b_value*(x-mc_new)))
#    ax4.plot([mc_new],[pval2nd],marker='o',color='red')
#    pval.append(pval2nd)

#--- choose the range of mc
mc_range=[1.5,1.5]
index_tf = [i and j for i, j in zip(mc_range[0]<=np.array(MC),np.array(MC)<mc_range[1])]
bvall =  np.array(B)[index_tf]
Err_B =  np.array(Err_B)[index_tf]

#--- b vs mc: vertical lines
#ax2.plot([mc_range[0],mc_range[0]],[np.min(B),np.max(B)],'--r')    
#ax2.plot([mc_range[1],mc_range[1]],[np.min(B),np.max(B)],'--r')    

#--- N vs m: vertical line
#ax.plot([mc_range[0]+DM/2,mc_range[0]+DM/2],
 #       [1,len(swarm_copy['magnitude'])],'--r')    

#--- n*10*bm (rescaled)
bval=0.8 #np.mean(bvall)
#bval2=0.8
ax3=fig.add_subplot(133)
ax3.set_xlabel('$m$')
#ax3.set_ylabel(r'$N\times10^{%s\times m}$'%bval)
ax3.set_ylabel(r'$N\times10^{%s\times m}$'%bval)
ax3.yaxis.set_label_position("right")
ax3.yaxis.tick_right()
ax3.set_yscale('log')
ax3.set_xticks(np.linspace(xmin,xmax,int(xmax-xmin)/1+1))
ax3.set_xlim(xmin,xmax)
#ax3.set_ylim(1e3,1e6)
#ax3.set_xticks(np.linspace(ax3.axis()[0],ax3.axis()[1],4))
for ikey in keys: #--- rescaled
    c = 10**(bval*d[ikey][0])
    ax3.plot([d[ikey][0],d[ikey][1]],
             [c*(N-d[ikey][2]),c*(N-d[ikey][2])],'-',color='black',
            ) #--- accumulated
#     c = 10**(bval2*d[ikey][0])
#     ax3.plot([d[ikey][0],d[ikey][1]],
#              [c*(N-d[ikey][2]),c*(N-d[ikey][2])],'-',color='red',
#              ) #--- accumulated
ax3.plot([mc_range[0]+DM/2,mc_range[0]+DM/2], #--- vertical line
        [len(swarm_copy[swarm_copy['magnitude']>mc_range[0]])*10**(bval*mc_range[0])/10,
         len(swarm_copy[swarm_copy['magnitude']>mc_range[0]])*10**(bval*mc_range[0])],'--r')    
#plt.plot(MC, pval,'-o',color='black')

fig.tight_layout(w_pad=-0.5)
#fig.savefig('%s/b.png'%DIR_OUTPT_figs,dpi=75)
plt.show()
In [20]:
#--- estimate b by varying the mc value

from math import *
import scipy.stats

#--- set min/max time to avoid temporal incompletenesss issue
swarm_copy = swarm.copy()#DataFrameSubSet( swarm.copy()#, 
                          #   'date', #
                           #  ( myTlohi.#lo, 
                            #   myTlohi.hi# ) )

#--- plot distributions (Nm)
fig = plt.figure(figsize=(12,4))
ax=fig.add_subplot(131)
ax.set_xlabel('$m$')
ax.set_ylabel('$N(mag.>m)$')
xmin = np.floor(swarm_copy['magnitude'].min())
xmax = np.ceil(swarm_copy['magnitude'].max())
ax.set_xticks(np.linspace(xmin,xmax,int(xmax-xmin)/1+1))
ax.set_xlim(xmin,xmax)
#plt.ylim(1,1e5)
ax.set_yscale('log')
for ikey in keys:
    ax.plot([d[ikey][0],d[ikey][1]],
             [N-d[ikey][2],N-d[ikey][2]],
             '-',color='black') #--- accumulated
junk = ax.hist( swarm_copy['magnitude'],
                bins=64,
               label='histogram') #--- histogram

#--- list of magnitudes
mm_list = swarm_copy['magnitude'].to_list()
mm_list = np.sort(mm_list)

#--- b vs mc
ax2=fig.add_subplot(132)
ax2.set_xticks(np.linspace(0,1,5,endpoint=True))
#ax2.set_ylim(0.5,1.0)
ax4 = ax2.twinx()
ax4.set_yscale('log')
ax4.set_ylim(1e-16,)
ax2.set_xlabel('$m_c$')
ax2.set_ylabel('$b$')
ax4.set_ylabel(r'$p_{val}$',labelpad=-10)
ax4.set_xticks(np.linspace(0,1,5,endpoint=True))

#--- bvalue bhatacharya eq. 5
DM = 0.01
n = 200
#--- 
MC = []
B = []
Err_B = []
pval = []
for mc_new in [i*DM for i in xrange(n)]: #--- m values are quantized (+- 0.1)!
    mlist = swarm_copy[ swarm_copy['magnitude']-mc_new>=0.0]['magnitude'] 
    m_mean=mlist.mean()
    assert m_mean-mc_new > 0.0#, 'm_mean=%s,mc=%s'%( m_mean, mc )
    k_mean = (m_mean-mc_new)/DM
    b_value = log(np.exp(1),10)*log(1+DM/(m_mean-mc_new))/DM
    nsamp = len(mlist)
    err_b = 1.96 * b_value / nsamp ** 0.5 #1.96*(b_value/nsamp/log(10,exp(1.0)))**0.5
#    err_b = 2*(k_mean*(k_mean+1)/nsamp)**0.5/log(10,exp(1.0)) #--- Guttorp and Hopkins (1986) eq.(4)

    
    
#     #--- bvalue bhatacharya eq. 6
#     #--- accumulated histogram
#     slist = mm_list[ mm_list>=mc_new ]
#     slist.sort()
#     NN = len(slist)
#     cdf = histogramACCUMLTD( slist.tolist() )
#     #--- bvalue bhatacharya eq. 6
#     n_cdf=len(cdf)-1
#     bi = []
#     for i in xrange(n_cdf):
#         mi = cdf[ i ][ 0 ]
#         Ni = NN - cdf[ i ][ 2 ]
#         bij = []
#         for j in xrange(n_cdf):
#             mj = cdf[ j ][ 0 ]
#             Nj = NN - cdf[ j ][ 2 ]
#             if i == j: continue
#             slope = ( log( Nj, 10 ) - log( Ni, 10 ) ) / ( mj - mi )
#             assert not np.isnan(slope)
#             bij.append( slope )
#         bi.append( np.median( bij ) )
#     bval_2nd = -np.median( bi )
#     err_b2nd = np.std(bi)/(len(bi))**0.5
    
    #--- plot b vs mc
    ax2.errorbar([mc_new],[b_value],yerr=err_b,marker='o',color='black')
#    ax2.errorbar([mc_new],[bval_2nd],yerr=err_b2nd, marker='s',color='green')
    
    #---
    MC.append(mc_new)
    B.append(b_value)
    Err_B.append(err_b)

    #--- p-value (by hand)
    (dmax,pval2nd) = scipy.stats.kstest( mlist, lambda x: 1.0-10**(-b_value*(x-mc_new)))
    ax4.plot([mc_new],[pval2nd],marker='o',color='red')
    pval.append(pval2nd)
#    print b_value, pval2nd 

xmin = min(MC)
xmax = max(MC)
ax2.set_xticks(np.linspace(xmin,xmax,int(xmax-xmin)/.2))
ax2.set_xlim(xmin,xmax)


#--- choose the range of mc
mc_range=[1.5,1.5]
index_tf = [i and j for i, j in zip(mc_range[0]<=np.array(MC),np.array(MC)<mc_range[1])]
bvall =  np.array(B)[index_tf]
Err_B =  np.array(Err_B)[index_tf]

#--- b vs mc: vertical lines
#ax2.plot([mc_range[0],mc_range[0]],[np.min(B),np.max(B)],'--r')    
#ax2.plot([mc_range[1],mc_range[1]],[np.min(B),np.max(B)],'--r')    

#--- N vs m: vertical line
#ax.plot([mc_range[0]+DM/2,mc_range[0]+DM/2],
 #       [1,len(swarm_copy['magnitude'])],'--r')    

#--- n*10*bm (rescaled)
bval=0.8 #np.mean(bvall)
ax2.plot([xmin,xmax],[bval,bval],'--r')
#bval2=0.8
ax3=fig.add_subplot(133)
ax3.set_xlabel('$m$')
#ax3.set_ylabel(r'$N\times10^{%s\times m}$'%bval)
ax3.set_ylabel(r'$N\times10^{%s\times m}$'%bval)
ax3.yaxis.set_label_position("right")
ax3.yaxis.tick_right()
ax3.set_yscale('log')
xmin = np.floor(swarm_copy['magnitude'].min())
xmax = np.ceil(swarm_copy['magnitude'].max())
ax3.set_xticks(np.linspace(xmin,xmax,int(xmax-xmin)/1+1))
ax3.set_xlim(xmin,xmax)
#ax3.set_ylim(1e3,1e6)
#ax3.set_xticks(np.linspace(ax3.axis()[0],ax3.axis()[1],4))
for ikey in keys: #--- rescaled
    c = 10**(bval*d[ikey][0])
    ax3.plot([d[ikey][0],d[ikey][1]],
             [c*(N-d[ikey][2]),c*(N-d[ikey][2])],'-',color='black',
            ) #--- accumulated
#     c = 10**(bval2*d[ikey][0])
#     ax3.plot([d[ikey][0],d[ikey][1]],
#              [c*(N-d[ikey][2]),c*(N-d[ikey][2])],'-',color='red',
#              ) #--- accumulated
ax3.plot([mc_range[0]+DM/2,mc_range[0]+DM/2], #--- vertical line
        [len(swarm_copy[swarm_copy['magnitude']>mc_range[0]])*10**(bval*mc_range[0])/10,
         len(swarm_copy[swarm_copy['magnitude']>mc_range[0]])*10**(bval*mc_range[0])],'--r')    
#plt.plot(MC, pval,'-o',color='black')

# mc_new = 0.5
# b_value = 0.94
# ax2.scatter([mc_new],[b_value],marker='s',color='green')
# mlist = swarm_copy[ swarm_copy['magnitude']-mc_new>=0.0]['magnitude'] 
# (dmax,pval2nd) = scipy.stats.kstest( mlist, lambda x: 1.0-10**(-b_value*(x-mc_new)))
# ax4.plot([mc_new],[pval2nd],marker='o',color='green')


fig.tight_layout(w_pad=-0.5)
#fig.savefig('%s/b.png'%DIR_OUTPT_figs,dpi=75)
plt.show()
In [21]:
##--- estimate b and mc
#
#
#with path.Path('/Users/Home/Desktop/Tmp/txt/myPYlibs'):
#    import plfit
#    import plvar
#    import plpva
#
##--- set min/max time to avoid temporal incompletenesss issue
#swarm_copy = swarm.copy()#DataFrameSubSet( swarm, 
#                          #   'date', 
#                           #  ( myTlohi.lo, 
#                            #   myTlohi.hi ) )
#
##--- power law fit clauset
#n_samp = 1000
#n_chunk = len( swarm_copy ) / n_samp
#
#i = 0
#fig = plt.figure(figsize=(8,4))
#ax = fig.add_subplot(111)
#ax.set_ylabel('m')
#ax2=ax.twinx()
#ax.scatter(swarm_copy['date'],swarm_copy['magnitude'],
#          alpha=0.1)
#
#fig2 = plt.figure(figsize=(8,4))
#ax21 = fig2.add_subplot(111)
#ax21.set_ylabel('m')
#ax21.scatter(swarm_copy['date'],swarm_copy['magnitude'],
#          alpha=0.1)
#
#MC = []
#B = []
#for j in xrange(n_chunk):
#    Mlist = 10**(swarm_copy.iloc[i:i+n_samp]['magnitude'])
#    fit = plfit.plfit( np.array(Mlist) ) #--- clauset
#    mc = log(fit[1],10) #--- completeness mag.
#    bval = fit[0]-1
#    MC.append(mc)
#    B.append(bval)
##    print mc, bval
#    ax2.plot([swarm_copy.iloc[i:i+n_samp]['date'].min(),swarm_copy.iloc[i:i+n_samp]['date'].max()],
#            [bval,bval],'-',color='red')
#
#    ax21.plot([swarm_copy.iloc[i:i+n_samp]['date'].min(),swarm_copy.iloc[i:i+n_samp]['date'].max()],
#            [mc,mc],'-',color='red')
#
#    i += n_samp
#Mlist = 10**(swarm_copy.iloc[i:i+len(swarm_copy)%n_samp]['magnitude'])
#fit = plfit.plfit( np.array(Mlist) ) #--- clauset
#mc = log(fit[1],10) #--- completeness mag.
#bval = fit[0]-1
#MC.append(mc)
#B.append(bval)
#
##---- plot bval
#ax2.plot([swarm_copy.iloc[i:i+n_samp]['date'].min(),swarm_copy.iloc[i:i+n_samp]['date'].max()],
#            [bval,bval],'-',color='red')
#ax2.tick_params(colors='red')
#ax2.set_ylabel('b',color='red')
#ax.set_xlabel('Date')
##print mc, bval
#fig.savefig('%s/b.png'%DIR_OUTPT_figs,dpi=75)
##--- plot mc
#ax21.plot([swarm_copy.iloc[i:i+n_samp]['date'].min(),swarm_copy.iloc[i:i+n_samp]['date'].max()],
#            [mc,mc],'-',color='red',label='mc')
#plt.legend()
#ax21.set_xlabel('Date')
#
##n, bins, patches = plt.hist(B, 50)
#
##--- error estimate
##[ error_b, error_mc, ntail ] = [ 0.0, 0.0, 0.0 ]
##[error_b, error_mc, ntail] = plvar.plvar(np.array(Mlist), 'silent')
##error_mc = log(1.0+error_mc/fit[1],10) #--- log(1+dM/M)
#plt.savefig('%s/mc.png'%DIR_OUTPT_figs,dpi=75)
In [22]:
# #--- goodness of the fit

# n_samp = 1000
# n_chunk = len( swarm_copy ) / n_samp
# ncols=4
# nrows = n_chunk / ncols + int(np.ceil(1.0*( len(swarm_copy) % n_samp) / n_samp))
# i = 0
# plt.figure(figsize=(ncols*4,nrows*4))
# for j in xrange(n_chunk):
#     Mlist = 10**(swarm_copy.iloc[i:i+n_samp]['magnitude'])
#     fit = plfit.plfit( np.array(Mlist) ) #--- clauset
#     mc = log(fit[1],10) #--- completeness mag.
#     bval = fit[0]-1
# #    [error_b, error_mc, ntail] = plvar.plvar(Mlist, 'silent') #--- error
#     plt.subplot(nrows,ncols,j+1)
#     Mlist=np.log10(Mlist)
#     nc = len(Mlist[Mlist>=mc])
#     error_b = (bval / nc / log(10.0) )**0.5
#     hist,bins = np.histogram(Mlist,bins=1024)
#     plt.plot(bins[:-1],-hist.cumsum()+len(Mlist))#,marker='o',markersize=6,linestyle='None')
#     xc=0.5*(bins[:-1]+bins[1:])
#     plt.plot(xc, len(Mlist[Mlist>=mc])*10**(-bval*(xc-mc)),'r',label='$b=%2.1f\pm%3.2f$'%(bval,error_b))
#     plt.plot([mc,mc],[1,len(Mlist[Mlist>=mc])],label='$mc=%2.1f$'%(mc)) # len(Mlist[Mlist>=mc])*10**(-bval*(xc-mc)),'r')
#     plt.yscale('log')
#     plt.xlim(swarm_copy['magnitude'].min(),swarm_copy['magnitude'].max())
#     plt.ylim(1,n_samp*10)
#     if j == 0:
#         plt.ylabel('N(size>m)')
#         plt.xlabel('m')
#     plt.legend()
#     i += n_samp

# Mlist = 10**(swarm_copy.iloc[i:i+len(swarm_copy)%n_samp]['magnitude'])
# fit = plfit.plfit( np.array(Mlist) ) #--- clauset
# mc = log(fit[1],10) #--- completeness mag.
# bval = fit[0]-1
# Mlist=np.log10(Mlist)
# nc = len(Mlist[Mlist>=mc])
# error_b = (bval / nc / log(10.0) )**0.5
# hist,bins = np.histogram(Mlist,bins=1024)

# plt.subplot(nrows,ncols,j+2)
# plt.plot(bins[:-1],-hist.cumsum()+len(Mlist))#,marker='o',markersize=6,linestyle='None')
# xc=0.5*(bins[:-1]+bins[1:])
# plt.plot(xc, len(Mlist[Mlist>=mc])*10**(-bval*(xc-mc)),'r',label='$b=%2.1f\pm%3.2f$'%(bval,error_b))
# plt.plot([mc,mc],[1,len(Mlist[Mlist>=mc])],label='$m_c=%2.1f$'%(mc)) # len(Mlist[Mlist>=mc])*10**(-bval*(xc-mc)),'r')
# plt.yscale('log')
# plt.xlim(swarm_copy['magnitude'].min(),swarm_copy['magnitude'].max())
# plt.ylim(1,n_samp)
# plt.legend()

# plt.savefig('%s/b_mult.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight')
    
In [23]:
# #--- plot complete catalog

# def ConvertDailyRate(hist, bin_edges ):
# #---convert data to daily rate     
#     t0 = pd.to_datetime( bin_edges[ 0 ] )
#     t1 = pd.to_datetime( bin_edges[ 1 ] )
#     delta_t = ( t1 - t0 ).total_seconds() / ( 60 * 60*24)
#     hist *= ( bin_edges[ 1 ] - bin_edges[ 0 ] ) / delta_t

# def ActivityRate( swarm ):
#     nbins = int( (swarm['date'].max()-swarm['date'].min()).days*4 ) #--- number of bins
    
#     tmax = swarm['date'].max().value #--- min/max
#     tmin = swarm['date'].min().value
    
#     hist, bin_edges = np.histogram(swarm['date'].apply(lambda x:x.value),                                   
#                     bins=np.linspace(tmin,tmax,nbins+1,endpoint=True),density=True) #--- probability dist.
#     hist *= len( swarm['date'] ) #--- int(hist).dt=n
#     cumm_number = np.cumsum(hist)*(bin_edges[1]-bin_edges[0]) #--- accumulated number
#     ConvertDailyRate( hist, bin_edges ) #--- daily activity
#     return bin_edges, hist, cumm_number
    
    
# #---------------------------------------------------------------------------------
# #-----------------
# #-----------------
# #-----------------
# #---------------------------------------------------------------------------------
   
# #--- completeness
# mc = 1.5



# #--- t0<t<t1
# #--- set min/max time to avoid temporal incompletenesss issue
# swarm_tmp = swarm.copy()#DataFrameSubSet( swarm, 
#                          #    'date', 
#                           #   ( swarm['date'].min(), 
#                            #    pd.to_datetime('2010-07-31') ) )
# #--- m > mc
# swarm_lohi = swarm_tmp.copy() #DataFrameSubSet( swarm_tmp, 
#                             # 'magnitude', 
#                              #( mc, sys.maxint ) ) 

# #--- spatial map
# #swarm_lohi.plot.scatter('longitude','latitude',
# #                        s=3**(swarm_lohi['magnitude']),
# #                        c='date',cmap='jet',
# #                        alpha = 0.4) #--- plot
    
# #--- temporal map
# #ax1.set_xlim(swarm_tmp['date'].min(),swarm_tmp['date'].max())
# for ii in xrange(10):
#     fig = plt.figure(figsize=(8,4))
#     ax1=fig.add_subplot(111)

#     #plt.xlabel('Time')
#     #plt.ylabel('M')

#     ax1.set_ylim(0,6)
#     tt=swarm[swarm['magnitude']==swarm['magnitude'].sort_values(ascending=False).iloc[ii]]['date'].iloc[0]
#     ax1.set_xlim(tt-datetime.timedelta(days=15),tt+datetime.timedelta(days=15))# pd.to_datetime('2014-12-04'),pd.to_datetime('2015-06-1'))
    
#     ax1.tick_params(axis='both',labelsize=18)
#     #ax1.tick_params(axis='x',rotation=18)
    
#     ax1.scatter(swarm_lohi['date'],swarm_lohi['magnitude'],
#                 s=2*np.exp(swarm_lohi['magnitude']),
#                 alpha=0.04,color='black')
        
#     #--- activity rate
#     bin_edges, hist, cumm_number = ActivityRate( swarm_lohi )
    
#     #--- plot
#     ax2 = ax1.twinx()  # instantiate a second axes that shares the same x-axis
#     ax2.plot(pd.to_datetime(bin_edges[:-1]),hist,'r-')
    
#     ax2.tick_params(axis='y',labelsize=18,colors='red')
    
#     ax2.set_yscale('log')
    
#     #ax2.set_ylim(1e1,1e3)
    
#     ax2.xaxis.set_major_formatter(dates.DateFormatter('%b %y'))
    
#     fig.autofmt_xdate()

# fig.savefig('%s/timeSeries_ok.png'%DIR_OUTPT_figs,bbox_inches='tight')
# plt.show()
In [24]:
#--- plot complete catalog

def ConvertDailyRate(hist, bin_edges ):
#---convert data to daily rate     
    t0 = pd.to_datetime( bin_edges[ 0 ] )
    t1 = pd.to_datetime( bin_edges[ 1 ] )
    delta_t = ( t1 - t0 ).total_seconds() / ( 60 * 60 )
    hist *= ( bin_edges[ 1 ] - bin_edges[ 0 ] ) / delta_t

def ActivityRate( swarm ):
    nbins = int( (swarm['date'].max()-swarm['date'].min()).days * 8 ) #--- number of bins
    
    tmax = swarm['date'].max().value #--- min/max
    tmin = swarm['date'].min().value
    
    hist, bin_edges = np.histogram(swarm['date'].apply(lambda x:x.value),                                   
                    bins=np.linspace(tmin,tmax,nbins+1,endpoint=True),density=True) #--- probability dist.
    hist *= len( swarm['date'] ) #--- int(hist).dt=n
    cumm_number = np.cumsum(hist)*(bin_edges[1]-bin_edges[0]) #--- accumulated number
    ConvertDailyRate( hist, bin_edges ) #--- daily activity
    return bin_edges, hist, cumm_number
    
    
#---------------------------------------------------------------------------------
#-----------------
#-----------------
#-----------------
#---------------------------------------------------------------------------------
   
#--- completeness
#mc = 1.5



#--- t0<t<t1
myTlohi = Tlohi( pd.to_datetime('2019-07-04'), 
                 pd.to_datetime('2019-07-16')) #swarm['date'].max())

#--- set min/max time to avoid temporal incompletenesss issue
swarm_tmp = DataFrameSubSet( swarm, 
                             'date', 
                             ( myTlohi.lo, 
                               myTlohi.hi ) )
#--- m > mc
swarm_lohi = swarm_tmp.copy() #DataFrameSubSet( swarm_tmp, 
                            # 'magnitude', 
                             #( mc, sys.maxint ) ) 

#--- spatial map
#swarm_lohi.plot.scatter('longitude','latitude',
#                        s=3**(swarm_lohi['magnitude']),
#                        c='date',cmap='jet',
#                        alpha = 0.4) #--- plot
    
#--- temporal map
fig = plt.figure(figsize=(8,3))#,dpi=150)
ax1=fig.add_subplot(111)

#plt.xlabel('Time')
#plt.ylabel('M')

ax1.set_ylim(0,7.2)
ax1.set_xlim(swarm_tmp['date'].min(),swarm_tmp['date'].max())

ax1.tick_params(axis='both',labelsize=18)
ax1.tick_params(axis='x',rotation=30)

ax1.scatter(swarm_lohi['date'],swarm_lohi['magnitude'],
            s=2*np.exp(swarm_lohi['magnitude']),
            alpha=0.04,color='black')

DrawFrame(ax1, (0.11,0.07),(0.32,0.1),0.01)

#--- activity rate
bin_edges, hist, cumm_number = ActivityRate( swarm_lohi )

#--- plot
ax2 = ax1.twinx()  # instantiate a second axes that shares the same x-axis
ax2.plot(pd.to_datetime(bin_edges[:-1]),hist,'r-',linewidth=1)

ax2.tick_params(axis='y',labelsize=18,colors='red')

#ax2.set_yscale('log')

#ax2.set_ylim(1e1,3e2)

ax2.xaxis.set_major_formatter(dates.DateFormatter('%d %b %y'))

#fig.autofmt_xdate(bottom=.1)

#fig.savefig('%s/timeSeries_ok.png'%DIR_OUTPT_figs,dpi=150,bbox_inches='tight')
plt.show()
In [25]:
#--- evaluate fractal dimension

import geopy.distance
from math import *
import random as rnd

class bins:
    def __init__(self, nBin, xlo, xhi, ydim = 1, err = None):
        self.lo = xlo - 1.0e-10
        self.hi = xhi + 1.0e-10
        self.dx = (self.hi-self.lo)/nBin
        self.xlist = [0.0 for i in xrange(nBin)]
        self.kounter = [0 for i in xrange(nBin)]
        self.ylist = [[0.0 for j in xrange(ydim)] for i in xrange(nBin)]
        self.nBin = nBin
        self.err = err
        self.max_y = [[-sys.maxint for j in xrange(ydim)] for i in xrange(nBin) ]
        self.min_y = [[sys.maxint for j in xrange(ydim)] for i in xrange(nBin)]
        if err:
            self.ySQlist = [[0.0 for j in xrange(ydim)] for i in xrange(nBin)]
    def GetBin(self,x):
        return int(floor((x-self.lo)/self.dx))
    def whichBin(self,x,y, ibin=[] ):
        assert x >= self.lo, 'x=%s,self.lo=%s'%(10**x,10**self.lo)
        assert x < self.hi, 'x=%s,self.hi=%s'%(10**x,10**self.hi)
        nr = int(floor((x-self.lo)/self.dx))
        if ibin:
            ibin[ 0 ] = nr
        self.kounter[nr] += 1
        self.xlist[nr] += x
        for idim in xrange(len(y)):
            self.ylist[nr][idim] += y[idim]
            if y[idim] >= self.max_y[nr][idim]: #--- set max value
                self.max_y[nr][idim]=y[idim]
            if y[idim] <= self.min_y[nr][idim]:
                self.min_y[nr][idim]=y[idim]
            if self.err:
                self.ySQlist[nr][idim] += y[ idim ] * y[ idim ]

    def res(self, logScaleX = None, logScaleY = None, base = 10, SUM = None, MINMAX=None ):
         indices = xrange(10**6)
         someList = []
         for x,index in zip(self.xlist,indices):
             nb = self.kounter[index]
             if nb == 0: continue
             xbar = self.xlist[index]/nb
             ybar = [y/nb for y in self.ylist[index]]
             if self.err:
                 sigmaY = [ ( ysq / nb - YBAR * YBAR ) ** 0.5 / nb ** 0.5 for ysq, YBAR in zip( self.ySQlist[ index ], ybar )]
                 if SUM:
                     sigmaY = [ i * nb for i in sigmaY ]
             if SUM:
                 ybar = [y for y in self.ylist[index]]
             if MINMAX:
                 MAX_y = [y for y in self.max_y[index]]
             if logScaleX:
                 xbar = base ** xbar
             if logScaleY:
                 ybar = [ base ** item for item in ybar ]
             if self.err:
                 someList.append([ xbar, ybar, sigmaY ])
             elif MINMAX:
                 someList.append([ xbar, ybar, MAX_y ])
             else:
                 someList.append([ xbar, ybar ])
         return someList


class histogram( bins ):
    def res( self, Radial = None, logScale = None, normalize = True, base = 10.0, ACCUMLTD = None ):
        PDF = []
        self.nPoint = nPoint = sum( self.kounter )
        indices = xrange( sys.maxint )
        y_accm = nPoint
        for y, index in zip( self.kounter, indices ):
            if not ACCUMLTD and y == 0:
                continue
            if not y == 0:
                x = self.xlist[ index ] / y #self.lo + index * self.dx
            else:
                x = self.lo + index * self.dx
            Y = 1.0 * y
            dx = self.dx
            if logScale:
                x = base ** x
                dx = x * ( base ** self.dx - 1.0 )
#               print Y, dx
            if normalize:
                Y /= ( nPoint * dx )
                if Radial:
                    Y /= ( 2.0 * pi * x )
#           PDF.append( [ x, Y ] )
#           PDF.append( [ x + dx, Y ] )
#           PDF.append( [ x + 0.5 * dx, Y, 0.0, ( 1.0 * y_accm / nPoint if normalize else 1.0 * y_accm )  ] )
            error_std = 0.0
            if self.err:
                error_std = sqrt( nPoint * Y * dx ) / ( nPoint * dx ) #--- poisson
                error_std = sqrt( nPoint * Y * dx * ( 1.0 - Y * dx ) ) / ( nPoint * dx ) #--- bi-nomial
            PDF.append( [ x, Y, 0.0, ( 1.0 * y_accm / nPoint if normalize else 1.0 * y_accm ), error_std ] )
            y_accm -= y
        return PDF

def GetCartesian( dff ):
    df = dff.copy()
    xlo = df['longitude'].min()
    xhi = df['longitude'].max()
    ylo = df['latitude'].min()
    yhi = df['latitude'].max()
    getDistX = lambda x: geopy.distance.vincenty( ( 0.0, xlo ), ( 0.0, x ) ).km
    getDistY = lambda y: geopy.distance.vincenty( ( ylo, 0.0 ), ( y, 0.0 ) ).km
    df[ 'x(km)' ] = df[ 'longitude' ].apply( getDistX ) 
    df[ 'y(km)' ] = df[ 'latitude' ].apply( getDistY ) 
    df[ 'z(km)' ] = df['depth']
    return df

def fractalDimension2nd( coord ):
    #--- sort
    if type( coord ) == type( [] ):
        coord = ListToDict( coord )
    points = coord.keys()
    points.sort()
    hsObject = histogram( 18 * 8, log( 1e-10,10), log( 1e8, 10 ) )
    for point_i in points:
        for point_j in points:
            if not point_i < point_j: #--- pairs ij with i<j
                continue
            rij = sum( [ ( i - j ) ** 2 for i, j in zip( coord[ point_i ], coord[ point_j ] ) ] ) # ** 0.5
            assert rij > 0, 'rij=%s,coord[ %s ]=%s, coord[ %s ]=%s' %(rij,point_i,coord[ point_i ], point_j, coord[ point_j ] )
            hsObject.whichBin( 0.5 * log( rij,10 ), [ 1.0 ] )
    for items in hsObject.res( logScale = True, normalize = True, ACCUMLTD = True ):
        if items[ 3 ] > 0.0: 
            yield items[ 0 ], items[ 3 ]

#---------------------------------------------------------------------------------
#-----------------
#-----------------
#-----------------
#---------------------------------------------------------------------------------
mc = 2.0

#--------------------
#----- subset
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi = swarm_lohi.sample( n = 2000 ) #--- sample

#--------------------
#--- cartesian coords
#--------------------
swarm_cartesian = GetCartesian( swarm_lohi )

#--------------------
#--- evaluate df
#--------------------
tmp_coord = swarm_cartesian['x(km)'].apply(lambda x: [x+rnd.random()*1e-6]) +\
            swarm_cartesian['y(km)'].apply(lambda x: [x+rnd.random()*1e-6]) +\
            swarm_cartesian['z(km)'].apply(lambda x: [x+rnd.random()*1e-6])
tmp_coord = tmp_coord.to_dict()
dict_NR = fractalDimension2nd( tmp_coord ) #, dmin = 1.0e-02 )

#--------------------
#--- scattered plot
#--------------------
swarm_cartesian.plot.scatter('x(km)','y(km)',
                        s=3**(swarm_lohi['magnitude']),
                        c='date',cmap='jet',
                        alpha = 0.4) 
#plt.savefig('%s/map2d.png'%DIR_OUTPT_figs,bbox_inches='tight')
#--------------------
#--- N(r) vs r
#--------------------
plt.figure( figsize = (4,4))
plt.xlabel('r(km)')
plt.ylabel('N(r)')
plt.xlim(1e-3,1e2)
plt.ylim(1e-8,1)
plt.yscale('log')
plt.xscale('log')
d_f = 2.2 #1.7
for i in dict_NR:
    plt.plot([i[ 0 ]],
             [1-i[ 1 ]],
             'o',color='black') #--- accumulated
    plt.plot(i[ 0 ],
             (1-i[ 1 ])/i[0]**d_f,
             '.',color='red') #--- accumulated
plt.show()
In [26]:
#--- evaluate fractal dimension (correltion dimension )

def getRmat2d( df_complete ):
    nmax = len( df_complete )
    rmat = np.matrix(np.zeros(nmax*nmax).reshape(nmax,nmax))
    for i in xrange( nmax ):
        #--- distance matrix
        df_dx = df_complete[ 'x(km)' ] - df_complete[ 'x(km)' ][ i ]
        df_dy = df_complete[ 'y(km)' ] - df_complete[ 'y(km)' ][ i ]
        df_sq = ( df_dx*df_dx+df_dy*df_dy ) ** 0.5
        df_sq[ : i ] = 0
        rmat[ i ] = np.array(df_sq)
    return np.array( rmat ) 

def getRmat3d( df_complete ):
    nmax = len( df_complete )
    rmat = np.matrix(np.zeros(nmax*nmax).reshape(nmax,nmax))
    for i in xrange( nmax ):
        #--- distance matrix
        df_dx = df_complete[ 'x(km)' ] - df_complete[ 'x(km)' ][ i ]
        df_dy = df_complete[ 'y(km)' ] - df_complete[ 'y(km)' ][ i ]
        df_dz = df_complete[ 'z(km)' ] - df_complete[ 'z(km)' ][ i ]
        df_sq = ( df_dx*df_dx+df_dy*df_dy+df_dz*df_dz ) ** 0.5
        df_sq[ : i ] = 0
        rmat[ i ] = np.array(df_sq)
    return np.array( rmat ) 

#def GetCartesian( dff ):
#    df = dff.copy()
#    xlo = df['longitude'].min()
#    xhi = df['longitude'].max()
#    ylo = df['latitude'].min()
#    yhi = df['latitude'].max()
#    getDistX = lambda x: geopy.distance.vincenty( ( 0.0, xlo ), ( 0.0, x ) ).km
#    getDistY = lambda y: geopy.distance.vincenty( ( ylo, 0.0 ), ( y, 0.0 ) ).km
#    df[ 'r(km)' ] = df[ 'longitude' ].apply( getDistX ) + df[ 'latitude' ].apply( getDistY ) * 1j
#    return df


# --------------------
# ----- subset
# --------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 

#--------------------
#--- cartesian coords
#--------------------
swarm_lohi = GetCartesian( swarm_lohi )

#swarm_lohi = DataFrameSubSet( swarm_lohi, 
 #                            'x(km)', 
  #                           ( 180,300)) 
#swarm_lohi = DataFrameSubSet( swarm_lohi, 
 #                            'y(km)', 
  #                           ( 100,250)) 

                              
#--- we need the distance matrix! 
#rmat2d = getRmat2d( swarm_lohi )
rmat3d = getRmat3d( swarm_lohi )
#r_list2d = rmat2d[rmat2d.nonzero()]
r_list3d = rmat3d[rmat3d.nonzero()]

# #--- histogram
r_min = log(np.min(r_list3d),10) #min(log(np.min(r_list2d),10),log(np.min(r_list3d),10))
r_max = log(np.max(r_list3d),10) #max(log(np.max(r_list2d),10),log(np.max(r_list3d),10))
nbins = int(ceil(r_max - r_min))*4
#2d
hist3d, edges = np.histogram(r_list3d,
                         bins=np.logspace(r_min, r_max, nbins) )
r_bins, edges = np.histogram(r_list3d,
                         bins=np.logspace(r_min, r_max, nbins), weights = r_list3d )

r_bins3d = r_bins[hist3d != 0]
hist3d = hist3d[hist3d != 0]
r_bins3d /= hist3d

# #--- normalize
hist_acc_3d = np.cumsum(hist3d)*1.0
hist_acc_3d /= hist_acc_3d[-1]

# #3d
# hist3d, edges = np.histogram(r_list3d,
#                          bins=np.logspace(r_min, r_max, nbins) )
# r_bins, edges = np.histogram(r_list3d,
#                          bins=np.logspace(r_min, r_max, nbins), weights = r_list3d )

# r_bins3d = r_bins[hist3d != 0]
# hist3d = hist3d[hist3d != 0]
# r_bins3d /= hist3d

# #--- normalize
# hist_acc_3d = np.cumsum(hist3d)*1.0
# hist_acc_3d /= hist_acc_3d[-1]

##--------------------
##--- scattered plot
##--------------------
#plt.scatter(swarm_lohi['x(km)'],
#            swarm_lohi['y(km)'],
#                        s=3**(swarm_lohi['magnitude']),
#                        cmap='jet',
#                        alpha = 0.1) 
#                             
##--- plot
##d_f = 1.4
#fig = plt.figure(figsize=(4,4))
#ax = fig.add_subplot(111)
#ax.set_xlim(10**r_min,10**r_max)
#ax.set_xlabel('$r(km)$')
#ax.set_ylabel('$N(r)$')
#ax.set_xscale('log')
#ax.set_yscale('log')
#ax.errorbar( r_bins, hist_acc, 
#            yerr = hist_acc/hist**.5,
#            color='black',marker='o')
#
#ax.errorbar( r_bins, 10000*hist_acc/r_bins**d_f,  #--- rescaled
#            yerr = hist_acc/hist**.5/r_bins**d_f,
#            color='red',marker='o',label='$N/r^{%s}$'%d_f)
#ax.legend()
#
#fig.savefig('%s/corrDim.png'%DIR_OUTPT_figs,bbox_inches='tight')
In [27]:
#--- plot
d_f = 2.1 #1.7
fig = plt.figure(figsize=(4,4))
ax = fig.add_subplot(111)
ax.axis([1e-5,1e3,1e-8,1e0])
ax.set_xlabel('$r(km)$')
ax.set_ylabel('$N(r)$')
ax.set_xscale('log')
ax.set_yscale('log')
ax.errorbar( r_bins3d, hist_acc_3d, 
            yerr = hist_acc_3d/hist3d**.5,
            color='black',marker='o')
# ax.errorbar( r_bins3d, hist_acc_3d, 
#             yerr = hist_acc_3d/hist3d**.5,
#             color='red',marker='s')

#ax.errorbar( r_bins, 10000*hist_acc/r_bins**d_f,  #--- rescaled
#            yerr = hist_acc/hist**.5/r_bins**d_f,
#            color='red',marker='o',label='$N/r^{%s}$'%d_f)
ax.plot(r_bins3d, r_bins3d**d_f,
        '-.r',label='$r^{%s}$'%d_f)

ax.legend()

#fig.savefig('%s/corrDim.png'%DIR_OUTPT_figs,bbox_inches='tight')
plt.show()
In [28]:
#--- evaluate fractal dimension (box counting )

import geopy.distance
from math import *
import random as rnd
#import warnings

#warnings.filterwarnings('ignore') #--- get rid of warnings

#def GetCartesian( dff ):
#    df = dff.copy()
#    xlo = df['longitude'].min()
#    xhi = df['longitude'].max()
#    ylo = df['latitude'].min()
#    yhi = df['latitude'].max()
#    getDistX = lambda x: geopy.distance.vincenty( ( 0.0, xlo ), ( 0.0, x ) ).km
#    getDistY = lambda y: geopy.distance.vincenty( ( ylo, 0.0 ), ( y, 0.0 ) ).km
#    df[ 'r(km)' ] = df[ 'longitude' ].apply( getDistX ) + df[ 'latitude' ].apply( getDistY ) * 1j
#    df[ 'x(km)' ] = df[ 'longitude' ].apply( getDistX )
#    df[ 'y(km)' ] = df[ 'latitude' ].apply( getDistY )
#    return df

#--------------------
#----- subset
#--------------------
# swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
#                              'magnitude', 
#                              ( mc, sys.maxint ) ) 
# #swarm_lohi = swarm_lohi.sample( n = 1000 ) #--- sample


# #--------------------
# #--- cartesian coords
# #--------------------
# swarm_lohi = GetCartesian( swarm_lohi )
# #swarm_lohi = DataFrameSubSet( swarm_lohi, #--- complete catalog
# #                             'x(km)', 
# #                             ( 200, 1000 ) ) 
# #swarm_lohi = DataFrameSubSet( swarm_lohi, #--- complete catalog
# #                             'y(km)', 
# #                             ( -1000, 220 ) ) 

# #--------------------
# #--- scattered plot
# #--------------------
# plt.figure(figsize=(4,4))
# #plt.xlim(0,np.max(r_list))
# #plt.ylim(-np.max(r_list)/2,np.max(r_list)/2)
# plt.xlabel('x(km)')
# plt.ylabel('z(km)')
# plt.scatter(swarm_lohi['x(km)'],
#             swarm_lohi['z(km)'],
#                         s=3**(swarm_lohi['magnitude']),
#                         cmap='jet',
#                         alpha = 0.01) 
# #--------------------
# #--- box counting
# #--------------------
# #tmp=pd.DataFrame({'x(m)':swarm_lohi['y(m)'],'y(m)':swarm_lohi['z(m)']}) #,'z(m)':swarm_lohi['z(m)']})
# #rmat = getRmat2d( tmp )
# #rmat = getRmat2d( swarm_lohi )

# #r_list = rmat[rmat.nonzero()]
# #r_min = np.min(r_list)
# #r_max = np.max(r_list)
# #
# plt.savefig('%s/map2d.png'%DIR_OUTPT_figs,bbox_inches='tight')
In [29]:
# #--- evaluate fractal dimension (box counting )


# def fractalDimension3d( x, y, z, L, dmin = None ):
#     x = np.array(x)
#     y=np.array(y)
#     z=np.array(z)
# #    xlo = x.min()
# #    xhi = x.max()
# #    ylo = y.min()
# #    yhi = y.max()
# #    zlo = z.min()
# #    zhi = z.max()
# #    L = max( zhi - zlo, yhi - ylo, xhi - xlo )
#     #---
#     xc = x.mean() #--- center
#     yc = y.mean()
#     zc = z.mean()
    
#     xo = xc - L #--- origin
#     yo = yc - L
#     zo = zc - L
# #    L *= 2.0
#     #---
#     kounter = 0  #--- number of divisions
#     dx = dy = dz = L  #--- scale
#     dl = []
#     N = []
#     while dx - dmin > 0.0: #--- do until dx < dmin
#         d = {}
#         index_i = np.array( map(int,np.floor( ( y - yo ) / dy ) ) )
#         index_j = np.array( map( int, np.floor( ( x - xo ) / dx ) ) )
#         index_k = np.array( map( int, np.floor( ( z - zo ) / dz ) ) )
        
#         inside_cell_lo = np.all([index_i>=0,index_j>=0,index_k>=0.0],axis=0) #--- point is inside?
#         inside_cell_hi = np.all([index_i<2**kounter,index_j<2**kounter,index_k<2**kounter],axis=0) #
#         inside_cell = np.all([inside_cell_lo,inside_cell_hi],axis = 0)

        
#         cell_id = index_k * 4 ** kounter + index_i * ( 2**kounter ) + index_j
#         #---
#         dl+=[dx]
#         N+=[len( set( cell_id ) )]
#         #---
#         kounter += 1
#         dx = dy = dz = L / 2**kounter
#     assert dx - dmin <= 0.0
#     #---
#     return np.array(dl), np.array(N)


# def fractalDimension2d( x, y, L, dmin = None ):
#     x = np.array(x)
#     y=np.array(y)
# #    xlo = x.min()
# #    xhi = x.max()
# #    ylo = y.min()
# #    yhi = y.max()
# #    L = max( yhi - ylo, xhi - xlo )
#     #---
#     xc = x.mean() #--- center
#     yc = y.mean()

#     xo = xc - L #--- origin
#     yo = yc - L
# #    L *= 2.0
#     #---
#     kounter = 0  #--- number of divisions
#     dx = dy = L / 2**kounter  #--- scale
#     dl = []
#     N = []
# #    pdb.set_trace()
# #    fig = plt.figure(figsize=(4,4))
# #    ax=fig.add_subplot(111)
# #    plt.scatter(x,y)
# #    ax.add_patch( patches.Rectangle(xy=(xo,yo), width=L, 
# #                                    height=L, linewidth=1,
# #                                    clip_on=False,facecolor=None,edgecolor='black',fill=None) ) 
    
#     while dx - dmin > 0.0: #--- do until dx < dmin
#         d = {}
        
#         index_i = np.array( map(int,np.floor( ( y - yo ) / dy ) ) ) #--- assingn index
#         index_j = np.array( map( int, np.floor( ( x - xo ) / dx ) ) )

#         inside_cell_lo = np.all([index_i>=0,index_j>=0],axis=0) #--- point is inside?
#         inside_cell_hi = np.all([index_i<2**kounter,index_j<2**kounter],axis=0) #
#         inside_cell = np.all([inside_cell_lo,inside_cell_hi],axis = 0)
        
#         index_i = index_i[ inside_cell ] #--- subset of the index array
#         index_j = index_j[ inside_cell ]

#         cell_id = index_i * ( 2**kounter ) + index_j
#         #---
#         dl+=[dx]
#         N+=[len( set( cell_id ) )]
#         #---
#         kounter += 1
#         dx = dy = L / 2**kounter
#     assert dx - dmin <= 0.0
#     #---
#     return np.array(dl), np.array(N)


# fig = plt.figure(figsize=(10,4))
# ax = fig.add_subplot(121)
# ax2 = fig.add_subplot(122)
# ax.set_xlabel('$r(km)$')
# ax.set_ylabel('$N(r)$')
# ax.set_xscale('log')
# ax.set_yscale('log')
# ax2.set_xlabel('$r(km)$')
# ax2.set_ylabel(r'$N(r)\times r^{%s}$'%d_f)
# ax2.set_xscale('log')
# ax2.set_yscale('log')
# dl={}
# N={}
# count = 0
# r_min = np.min(r_list)
# r_max = np.max(r_list)
# for L in np.logspace(log(r_min,2),log(r_max,2),num=6,base=2.0):
# #for L in [2*420.0]:
# #    dl[count], N[count] = fractalDimension2d( swarm_lohi['x(km)'],
# #                                              swarm_lohi['y(km)'], L, dmin=r_min ) # , swarm_lohi['z(m)'], dmin=r_min )
#     dl[count], N[count] = fractalDimension3d( swarm_lohi['x(km)'],
#                                              swarm_lohi['y(km)'], 
#                                              swarm_lohi['z(km)'], 
#                                              L, dmin=r_min ) 

# ##--- try random
# #x=np.array([rnd.random() for i in xrange(1000)])
# #y=0.2*x #[rnd.random() for i in xrange(10000)]
# ##z=x #[rnd.random() for i in xrange(10000)]
# #tmp=pd.DataFrame({'x(m)':x,'y(m)':y})#,'z(m)':z})
# #rmat = getRmat2d( tmp )
# #r_list = rmat[rmat.nonzero()]
# #r_min = min(r_list)
# #sdict = fractalDimension2d( x,y,
# #                           dmin=r_min )
# #plt.scatter(x,y)

# #--- plot
#     #ax.set_xlim(10**r_min,10**r_max)
#     #for key in sdict:
#     ax.plot(dl[count],N[count],'-o')
#     ax2.plot(dl[count],N[count]*dl[count]**d_f,marker='o',label='$L=%2.1e$'%L)#,linestyle='None')
#     count+=1
# ax2.legend(bbox_to_anchor=(1, 1))
# fig.tight_layout()

# fig.savefig('%s/boxCount.png'%DIR_OUTPT_figs,bbox_inches='tight')
    
In [30]:
Df = 2.1 #1.7 #2.2 #2.0 #1.7
bval = 0.8
mc=2.0 #1.5
In [31]:
#--- trig analysis (new) (it's a large data sets. run the simulation on a cluster and use the outputs)

from scipy.sparse import lil_matrix
import time
from IPython.display import display
import datetime
import pdb
import numpy.linalg
import numpy as np

def UpperTriang(a):
    il1 = np.tril_indices(a.shape[0])
    a[il1] = 0
    return a
    
def getTmat( df_complete ):
    nmax = len( df_complete )
    prefact = 1.0 / ( 24.0 * 60 * 60 ) #--- daily
    tmat = np.matrix(np.zeros(nmax*nmax).reshape(nmax,nmax))
    tmat_ut = np.matrix(np.zeros(nmax*nmax).reshape(nmax,nmax))
    for i in xrange( nmax ):
        df_dt = ( df_complete[ 'date' ] - df_complete[ 'date' ][ i ] ).dt.total_seconds() * prefact #--- time diff between i-th event and all subsequent events	
        tmat[ i ] = np.array(df_dt)
        df_dt[ : i ] = df_dt[ i ] #--- must have an upper triangular matrix 
        tmat_ut[ i ] = np.array(df_dt)

        #---
    return np.array( tmat ), np.array( tmat_ut )


def AddPositionList(df_complete):
    df_complete['r(m)'] = df_complete['x(m)'].apply(lambda x: [x])+ \
                          df_complete['y(m)'].apply(lambda x: [x])+ \
                          df_complete['z(m)'].apply(lambda x: [x])

def getRmat( df_complete ):
    nmax = len( df_complete )
    rmat = np.matrix(np.zeros(nmax*nmax).reshape(nmax,nmax))
    rmat_ut = np.matrix(np.zeros(nmax*nmax).reshape(nmax,nmax))
    for i in xrange( nmax ):
        #--- distance matrix
        df_dx = ( df_complete[ 'x(km)' ] - df_complete[ 'x(km)' ][ i ] )
        df_dy = ( df_complete[ 'y(km)' ] - df_complete[ 'y(km)' ][ i ] )
        df_dz = ( df_complete[ 'z(km)' ] - df_complete[ 'z(km)' ][ i ] ) #comment if 2-d
        df_sq = ( df_dx * df_dx + df_dy * df_dy + df_dz * df_dz ) ** 0.5
        rmat[ i ] = np.array(df_sq)
        df_sq[ : i ] = 0
        rmat_ut[ i ] = np.array(df_sq)
    return np.array( rmat ), np.array( rmat_ut )

def getMmat( df_complete ):
    nmax = len( df_complete )
    m_mat = np.matrix(np.zeros(nmax*nmax).reshape(nmax,nmax))
    m_mat_ut = np.matrix(np.zeros(nmax*nmax).reshape(nmax,nmax))
    for i in xrange( nmax ):
        #--- magnitude
        df_m = pd.Series( [ df_complete[ 'magnitude' ][ i ] ] * nmax )
        m_mat[ i ] = np.array(df_m)
        df_m[ : i ] = 0
        m_mat_ut[ i ] = np.array(df_m)
    return np.array( m_mat ), np.array( m_mat_ut ) 

def vectorizedAnalysis( df_complete ):
    #--- setup t, r, m
    t0 = time.time()
    tmat0, tmat = getTmat( df_complete )
    print 'setting up tmat:%s s'%(time.time() - t0)
    t0 = time.time()
    r_mat0, r_mat = getRmat( df_complete )
    print 'setting up rmat:%s s'%(time.time() - t0)
    t0 = time.time()
    m_mat0, m_mat = getMmat( df_complete )
    print 'setting up m_mat:%s s'%(time.time() - t0)
      
        
#    #--- make upper-triangular matrices    
#    t0 = time.time()
#    tmat = UpperTriang(np.copy(tmat0))
#    r_mat = UpperTriang(np.copy(r_mat0))
#    m_mat = UpperTriang(np.copy(m_mat0))
#    print 'setting up ut matrix:%s s'%(time.time() - t0)
    
    #--- nij
    t0 = time.time()
    NIJ = tmat * r_mat ** ( Df ) * 10 ** ( - bval * ( m_mat - mc ) )
    TIJ = tmat * 10 ** ( - 0.5 * bval * ( m_mat - mc ) ) #--- scaled time
    RIJ = NIJ / TIJ #r_mat ** ( Df ) * 10 ** ( - 0.5 * bval * ( m_mat - mc ) ) #--- scaled time
    print 'setting up NIJ:%s s'%(time.time() - t0)
    nmax = len( df_complete )
    N_sparse = lil_matrix( ( nmax, nmax ) ) #--- sparse matrix
    T_sparse = lil_matrix( ( nmax, nmax ) )
    R_sparse = lil_matrix( ( nmax, nmax ) )
    for junk, j in zip( NIJ, xrange( sys.maxint ) ): 
        if j == 0:
            continue
        x = min( NIJ[ : j, j ][ NIJ[ : j, j ] != 0 ] ) #--- min nij
        assert x != 0.0
        rowi = np.where( NIJ[ : j, j ] == x )[ 0 ][ 0 ] #--- find row
        assert rowi <= j
        N_sparse[ rowi, j ] = x #--- insert
        T_sparse[ rowi, j ] = TIJ[ rowi, j ]
        R_sparse[ rowi, j ] = RIJ[ rowi, j ]    
    return N_sparse, (T_sparse, tmat0 ), ( R_sparse, r_mat0 ), m_mat0


#--------------------
#----- subset
#--------------------
#--- t0<t<t1
# myTlohi = Tlohi( pd.to_datetime('2019-07-04'), 
#                  swarm['date'].max())

# swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
#                              'date', 
#                              ( myTlohi.lo, myTlohi.hi ) ) 

swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)

#--------------------
#--- cartesian coords
#--------------------
swarm_lohi = GetCartesian( swarm_lohi )

#AddPositionList( swarm_lohi )
swarm_lohi.reset_index(inplace=True,drop=True)

#--- vectorized
t0 = time.time()
N_sparse, (T_sparse,tmat), (R_sparse,rmat), m_mat = vectorizedAnalysis( swarm_lohi )
print 'duration=%s s'%(time.time() - t0)
setting up tmat:7.26244783401 s
setting up rmat:11.0669908524 s
setting up m_mat:6.24378299713 s
setting up NIJ:3.47880101204 s
duration=29.1538589001 s
In [32]:
# #--- random analysis

# def shuffle(amat):
#     ind_list = range(amat.shape[0])
#     np.random.shuffle(ind_list)
#     amat_row = amat[ind_list]
#     return amat_row[:,ind_list]

# def GetNij(rmat,tmat,m_mat,df_complete):
    
#     tmat = shuffle(np.abs(tmat))
#     r_mat = shuffle(rmat)
#     m_mat = shuffle(m_mat)
    
#     #--- nij
#     NIJ = tmat * r_mat ** ( Df ) * 10 ** ( - bval * ( m_mat - mc ) )
#     TIJ = tmat * 10 ** ( - 0.5 * bval * ( m_mat - mc ) ) #--- scaled time
#     RIJ = r_mat ** ( Df ) * 10 ** ( - 0.5 * bval * ( m_mat - mc ) ) #--- scaled time
#     nmax = len( df_complete )
#     N_sparse = lil_matrix( ( nmax, nmax ) ) #--- sparse matrix
#     T_sparse = lil_matrix( ( nmax, nmax ) )
#     R_sparse = lil_matrix( ( nmax, nmax ) )
#     for junk, j in zip( NIJ, xrange( sys.maxint ) ): 
#         if j == 0:
#             continue
            
#         x = min( NIJ[ : j, j ][ NIJ[ : j, j ] != 0 ] ) #--- min nij
#         assert x != 0.0
#         rowi = np.where( NIJ[ : j, j ] == x )[ 0 ][ 0 ] #--- find row
#         N_sparse[ rowi, j ] = x #--- insert
#         T_sparse[ rowi, j ] = TIJ[ rowi, j ]
#         R_sparse[ rowi, j ] = RIJ[ rowi, j ]    
#     return N_sparse, (T_sparse, tmat ), ( R_sparse, r_mat), m_mat

# N_sparse_rnd = {}
# T_sparse_rnd = {}
# R_sparse_rnd = {}
# tmat_rnd = {}
# rmat_rnd = {}
# nsamp = 1
# tmat = np.abs(tmat)
# for i in xrange(nsamp):
#     print i
#     N_sparse_rnd[i], (T_sparse_rnd[i],tmat_rnd[i]), (R_sparse_rnd[i],rmat_rnd[i]), junk = GetNij(rmat,tmat,m_mat,swarm_lohi)

random analysis

In [33]:
#--- random analysis 1st

def shuffleDIC( dataFrame, column = None ):
    junk=dataFrame[column].sample(frac=1).reset_index(drop=True)
    dataFrame[column] = np.array(junk)

def shuffle(df, n=1, axis=0):     
    df = df.copy()
    for _ in range(n):
        df.apply(np.random.shuffle, axis=axis)
    return df

def AssignXYZ(AssignXYZ):
    swarm_shuffled['x(m)'] = swarm_shuffled['r(m)'].apply(lambda x:x.real)
    swarm_shuffled['y(m)'] = swarm_shuffled['r(m)'].apply(lambda x:x.imag)

#--------------------
#--- randomize
#--------------------
swarm_shuffled = swarm_lohi.copy()
#AddPositionList( swarm_shuffled )
#display(swarm_shuffled.head())
shuffleDIC( swarm_shuffled, column = 'magnitude' )
shuffleDIC( swarm_shuffled, column = ['x(km)','y(km)','z(km)'] )
#AssignXYZ(AssignXYZ)
#swarm_shuffled.to_csv('%s/swarm_shuffled_incomplete.csv'%DIR_OUTPT) #--- store

if 1: #not LOAD_MAT:
#--------------------
#--- vectorized
#--------------------
    t0 = time.time()
    N_sparse_rnd, (T_sparse_rnd, tmat_rnd), (R_sparse_rnd,rmat_rnd), junk = vectorizedAnalysis( swarm_shuffled)
    print 'analysis duration=%s s'%(time.time() - t0)

#--------------------
#--- save sparse
#--------------------
#    for mat, title in zip([N_sparse_rnd,T_sparse_rnd,R_sparse_rnd],
#                      ['N_sparse_rnd_matrix.npz','T_sparse_rnd_matrix.npz','R_sparse_rnd_matrix.npz']):
#        scipy.sparse.save_npz('%s/%s'%(DIR_OUTPT,title), mat.tocsr()) #--- output

#    swarm_shuffled.to_csv('%s/swarm_shuffled.csv'%DIR_OUTPT)

#else: #--- load previously-computed matrices 
#    N_sparse_rnd = scipy.sparse.load_npz('%s/N_sparse_rnd_matrix.npz'%DIR_OUTPT )
#    T_sparse_rnd = scipy.sparse.load_npz('%s/T_sparse_rnd_matrix.npz'%DIR_OUTPT )
#    R_sparse_rnd = scipy.sparse.load_npz('%s/R_sparse_rnd_matrix.npz'%DIR_OUTPT )
    
#    swarm_shuffled = pd.read_csv('%s/swarm_shuffled.csv'%DIR_OUTPT)
#    swarm_shuffled_incomplete = pd.read_csv('%s/swarm_shuffled_incomplete.csv'%DIR_OUTPT)
setting up tmat:7.353951931 s
setting up rmat:11.0908768177 s
setting up m_mat:6.24717617035 s
setting up NIJ:3.47981977463 s
analysis duration=29.2826120853 s
In [34]:
# #--- save sparse matrices

# import scipy.sparse

# for mat, title in zip([N_sparse,T_sparse,R_sparse],
#                      ['N_sparse_matrix.npz','T_sparse_matrix.npz','R_sparse_matrix']):
#     scipy.sparse.save_npz('%s/%s'%(DIR_OUTPT,title), mat.tocsr()) #--- output

# for mat, title in zip([N_sparse_rnd[0],T_sparse_rnd[0],R_sparse_rnd[0]],
#                      ['N_sparse_rnd_matrix.npz','T_sparse_rnd_matrix.npz','R_sparse_rnd_matrix']):
#     scipy.sparse.save_npz('%s/%s'%(DIR_OUTPT,title), mat.tocsr()) #--- output

# #swarm_shuffled.to_csv('%s/swarm_shuffled.csv'%DIR_OUTPT)
In [35]:
# #--- load sparse matrices
# #--- 1- run on the cluster: 
# #--- 2- sbatch --mem=8gb --partition=single  --time=02:59:59 -n 1 ./oarScript.sh
# #--- 3- oarScript.sh: source activate conda-env; python ./swarmEMC.py
# #--- 4- copy: scp arc:/home/kamran.karimi1/Project/seismic/dataset/El_Mayor_Cucpah/* ./swarm/dataset/El_Mayor_Cucpah/

# import scipy.sparse
# DIR_OUTPT0 = './runRidgeCrest/Run0'
# N_sparse = scipy.sparse.load_npz('%s/N_sparse_matrix.npz'%DIR_OUTPT0 )
# T_sparse = scipy.sparse.load_npz('%s/T_sparse_matrix.npz'%DIR_OUTPT0 )
# R_sparse = scipy.sparse.load_npz('%s/R_sparse_matrix.npz'%DIR_OUTPT0 )
# #rmat = np.load('%s/rmat.npy'%DIR_OUTPT)
# #tmat = np.load('%s/tmat.npy'%DIR_OUTPT)

# N_sparse_rnd = {}
# T_sparse_rnd = {}
# R_sparse_rnd = {}

# nsamp = 1
# N_sparse_rnd[0] = scipy.sparse.load_npz('%s/N_sparse_rnd_matrix.npz'%DIR_OUTPT0 )
# T_sparse_rnd[0] = scipy.sparse.load_npz('%s/T_sparse_rnd_matrix.npz'%DIR_OUTPT0 )
# R_sparse_rnd[0] = scipy.sparse.load_npz('%s/R_sparse_rnd_matrix.npz'%DIR_OUTPT0 )
# #rmat_rnd = np.load('%s/rmat_rnd.npy'%DIR_OUTPT)
# #tmat_rnd = np.load('%s/tmat_rnd.npy'%DIR_OUTPT)

# #swarm_shuffled = pd.read_csv('%s/swarm_shuffled.csv'%DIR_OUTPT)
# #swarm_shuffled_incomplete = pd.read_csv('%s/swarm_shuffled_incomplete.csv'%DIR_OUTPT)
In [36]:
##--- matrix map of nij & nij_rand
#
#plt.subplot(2,1,1)
#plt.pcolormesh(np.log(N_sparse.toarray()),cmap='jet')
#plt.subplot(2,1,2)
#plt.pcolormesh(np.log(N_sparse_rnd.toarray()),cmap='jet')
In [37]:
# mpl.rcParams.update(mpl.rcParamsDefault)
# warnings.filterwarnings('ignore') #--- get rid of warnings

# rc('text', usetex=True)
# font = {'size'   : 26}
# matplotlib.rc('font', **font)

#---------------------------
#--- compute threshold n_th
#---------------------------

def PLOT(edges, hist, cdf, title ):
    #--- plot
    plt.figure(figsize=(8,4))
    plt.subplot(1,2,1)
#    plt.xlim(1e-13,1)
#    plt.ylim(1e-1,1e13)
    plt.xscale('log')
    plt.yscale('log')
    plt.xlabel('nij')
    plt.ylabel('P(nij)')
    plt.title( title )
    plt.plot(edges[:-1],hist,'.-')
    
    plt.subplot(1,2,2)
#    plt.xlim(1e-13,1)
#    plt.ylim(1e-3,1)
    plt.xscale('log')
    plt.yscale('log')
    plt.xlabel('nij')
    plt.ylabel('cdf')
    plt.plot(edges[:-1],cdf,'.-')
    plt.plot([n_thresh for i in edges[:-1]], cdf)
#---    
def HIST( nij, nbins ):
    #--- histograms
    nij_TxR = nij['T*']*nij['R*']
    hist, edges = np.histogram( nij_TxR,
                            bins=np.logspace(np.log(nij_TxR.min()),np.log(nij_TxR.max()),nbins),
                            density=True)
    #--- accumulated
    cdf, edges = np.histogram( nij_TxR,
                           bins=np.logspace(np.log(nij_TxR.min()),np.log(nij_TxR.max()),nbins,endpoint=True))
    cdf=np.cumsum(cdf)
    #assert cdf[-1] == len(nij_TxR), '%s,%s'%(cdf[-1], len(nij_TxR))
    N = 1.0 * len(nij_TxR)
    cdf = cdf / N
    return edges, hist, cdf

def reset_xticks(nn,ax,(TMIN,TMAX)):
    ax.set_xticks( np.linspace(ax.axis()[0],ax.axis()[1],nn) ) 
    labels=np.linspace(TMIN,TMAX,nn)
    labels = [r'$10^{%i}$'%i for i in labels]
    ax.set_xticklabels(labels)

def reset_yticks(nn,ax,(RMIN,RMAX)):
    ax.set_yticks( np.linspace(ax.axis()[2],ax.axis()[3],nn) ) 
    labels=np.linspace(RMIN,RMAX,nn)
    labels = [r'$10^{%i}$'%i for i in labels]
    ax.set_yticklabels(labels)
                 

def GetTrigListLoHi( nij_trig, col, (tlo,thi), swarm_lohi ):
    list_of_mothers = nij_trig.groupby(by='Parent_id').groups.keys() 
    list_of_mothers.sort()
    tmp_df = swarm_lohi.loc[ list_of_mothers ]
    tmp_df = tmp_df[ (tmp_df[ col ] >= tlo) & 
                 (tmp_df[ col ] < thi) ]
    return nij_trig[ pd.DataFrame([nij_trig['Parent_id'] == i for i in tmp_df.index]).any() ]

def GetInterpolatedData( df0, (TMIN,TMAX),(RMIN,RMAX) ):
    nbins_per_decade = 16
    nbins_x=nbins_y=int(TMAX-TMIN)*nbins_per_decade
    df=df0.copy()

    df['T*']=df[ 'T*' ].apply(lambda x: log(x,10))
    df['R*']=df[ 'R*' ].apply(lambda x: log(x,10))
    heatmap, xedges, yedges = np.histogram2d( df[ 'T*' ], df[ 'R*'], bins=[np.linspace(TMIN,TMAX,nbins_x+1), np.linspace(RMIN,RMAX,nbins_y+1)], normed=True)
    heatmap *= len( df )
    heatmap = gaussian_filter( heatmap, sigma = nbins_per_decade/4 )
    return heatmap

def GetDistance( df1, df2 ):
    df_sq = (np.array( df1['x(km)'] ) - np.array( df2['x(km)'] ) ) ** 2 + \
            (np.array( df1['y(km)'] ) - np.array( df2['y(km)'] ) ) ** 2 + \
            (np.array( df1['z(km)'] ) - np.array( df2['z(km)'] ) ) ** 2 
    return pd.Series( df_sq ** 0.5 )

def RemovePair( dff, cs, catalog ):
    pref = 1.0 / 3600.0 / 24 #--- daily
    df = dff.copy()
    
    series_r = GetDistance(catalog.loc[df['Parent_id']],catalog.loc[df['Event_id']])
     
#    series_r = pd.Series(np.array( catalog.loc[df['Parent_id']]['r(km)'] ) -\
#                         np.array( catalog.loc[df['Event_id']]['r(km)'] ) )
    assert len(series_r[series_r==np.nan]) == 0
    df['R'] = series_r.copy() #abs()

    series_t = pd.Series(np.array( catalog.loc[df['Event_id']]['date'] ) -\
                         np.array( catalog.loc[df['Parent_id']]['date'] ) )
    df['T'] = series_t.apply(lambda x:x.total_seconds()*pref)
    assert len ( df[ df['R'] == 0.0 ] ) == 0, '%s'%display( df[ df['R'] == 0.0 ] )
    assert len ( df[ df['T'] == 0.0 ] ) == 0, '%s'%display( df[ df['T'] == 0.0 ] )
    return df [ df[ 'R' ] <= df[ 'T' ] * cs ], df [ df[ 'R' ] > df[ 'T' ] * cs ]


#--------------------
#----- set parameters
#--------------------
#--- set quantile
quantile = 0.13 #kam 0.5 #0.35 #--- comment if passed by argument
#--- wave speed
cs = 3 * (24*3600) #--- km per day
#--- cut-off time
dt_cut = 0.0 #--- day
#--- plot scattered
# TMIN=-5
# TMAX=3
# RMIN=-4
# RMAX=4


#--------------------
#----- subset (the same as trig. analysis)
#--------------------
#--- t0<t<t1
# myTlohi = Tlohi( pd.to_datetime('2019-07-04'), 
#                  swarm['date'].max())

# swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
#                              'date', 
#                              ( myTlohi.lo, myTlohi.hi ) ) 

swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)

#--------------------
#--- cartesian coords
#--------------------
swarm_lohi = GetCartesian( swarm_lohi )

#AddPositionList( swarm_lohi )
swarm_lohi.reset_index(inplace=True,drop=True)


#--------------------
#--- store actual data in a dataframe
#--------------------
rows, cols = N_sparse.nonzero()
tlist = T_sparse[rows, cols].toarray()[0]
rlist = R_sparse[rows, cols].toarray()[0]
nij = pd.DataFrame({'Event_id':cols, 'Parent_id':rows,'T*':tlist,'R*':rlist})

#--------------------
#--- store random data in a dataframe
#--------------------
#nij_rnd = {}
for i in xrange(1): #nsamp):
    rows, cols = N_sparse_rnd.nonzero()
    tlist = T_sparse_rnd[rows, cols].toarray()[0]
    rlist = R_sparse_rnd[rows, cols].toarray()[0]
    nnn = len(cols)
    nij_rnd = pd.DataFrame({'Event_id':cols, 'Parent_id':rows,'T*':tlist,'R*':rlist},index=range(i*nnn,(i+1)*nnn))


#--------------------------------------------
#----- only include mother events with 
#----- t_mother between (t_lo, t_hi) 
#--------------------------------------------
#--- actual data
#--- remove pairs with r > cs.t
nij, nij_removed = RemovePair( nij, cs, swarm_lohi )
myTlohi = Tlohi(swarm_lohi['date'].min()+datetime.timedelta(days=dt_cut), #--- set cut-off time
               swarm_lohi['date'].max()+datetime.timedelta(days=-dt_cut)
               )
nij = GetTrigListLoHi( nij, 'date',  #--- t_mother > t_lo
                       ( myTlohi.lo, myTlohi.hi ), 
                           swarm_lohi)
nij.reset_index(inplace=True, drop=True)

#--- random data
#for i in xrange(nsamp):
#    nij_rnd = RemovePair( nij_rnd, cs, swarm_shuffled )
#    nij_rnd = GetTrigListLoHi( nij_rnd, 'date', 
#                       ( myTlohi.lo, myTlohi.hi ), 
#                           swarm_shuffled)
#    nij_rnd.reset_index(inplace=True, drop=True)

#--- compute threshold
N_thresh = []
frames = []
for i in xrange(1): #nsamp):
    frames.append(nij_rnd)
    #--- plot histograms for random data
    edges, hist, cdf = HIST( nij_rnd, 1024 )
#    edges = 0.5*(edges[:-1]+edges[1:])
    N_thresh.append(edges[:-1][cdf>quantile][0])
#n_thresh = np.mean(N_thresh) #--- comment if passed by arg.
print 'n_th=%2.1e+%2.1e' %(n_thresh,np.std(N_thresh) / nsamp ** 0.5)
Nij_rnd = pd.concat([nij_rnd]) #frames)


#--- distribution of nij (actual data)
edges_actual, hist_actual, cdf_actual = HIST( nij, 1024 )
#edges_actual = 0.5*(edges_actual[:-1]+edges_actual[1:])
#PLOT(edges, hist, cdf, 'random')

#--- (Tmin,Tmax)-(Rmin,Rmax)
TMIN=np.floor(np.log10(nij['T*']).min())
TMAX=np.ceil(np.log10(nij['T*']).max())
RMIN=np.floor(np.log10(nij['R*']).min())
RMAX=np.ceil(np.log10(nij['R*']).max())

#--------------------
#--- plot scattered
#--------------------
fig = plt.figure(figsize=(8,4))


#--------------------
#--- interpolated data
#--------------------
for nn, index, title in zip([ nij, Nij_rnd ],xrange(2),['actual','randomized']):
#for nn, index, title in zip([ nij ],xrange(2),['actual']):
    ax = fig.add_subplot(1,2,index+1)
    #--- interpolate
    heatmap=GetInterpolatedData(nn,(TMIN,TMAX),(RMIN,RMAX))
    ax.pcolormesh(heatmap.T,cmap='coolwarm')
    #---
#    ax.set_xlabel('T*')
#    ax.set_ylabel('R*')
#    ax.set_title(title)
    ##--- reset x-ticks
    
    reset_xticks(4,ax,(TMIN,TMAX))
    #--- reset y-ticks
    reset_yticks(4,ax,(RMIN,RMAX))
    #--- nij must be within limits
    nij_copy = nn.copy()
    nij_copy=nij_copy[(nij_copy['T*'] >= 10**TMIN ) &
    (nij_copy['T*'] < 10**TMAX ) &
    (nij_copy['R*'] >= 10**RMIN ) &
    (nij_copy['R*'] < 10**RMAX )]
    #--- mapping
    xx=ax.axis()[0]+(ax.axis()[1]-ax.axis()[0])*(np.log10(nij_copy['T*'])-TMIN)/(TMAX-TMIN)
    yy=ax.axis()[2]+(ax.axis()[3]-ax.axis()[2])*(np.log10(nij_copy['R*'])-RMIN)/(RMAX-RMIN)
    ax.set_xlim(ax.axis()[0],ax.axis()[1])
    ax.set_ylim(ax.axis()[2],ax.axis()[3])
    #--- plot                 
    DrawFrame(ax, (0.25,0.08),(0.16,0.08),0.01)
    ax.scatter(xx,
              yy,
               alpha=1,color='black',s=0.01)
    #--- draw the separation line
    xx=ax.axis()[0]+(ax.axis()[1]-ax.axis()[0])*(np.log10(nij_copy['T*'])-TMIN)/(TMAX-TMIN)
    yy=ax.axis()[2]+(ax.axis()[3]-ax.axis()[2])*(np.log10(n_thresh/nij_copy['T*'])-RMIN)/(RMAX-RMIN)
#    print len(xx)
#    xx=xx[0:-1:300]
#    yy=yy[0:-1:300]
#    print len(xx)
    ax.plot( xx, yy,'-.',color='white',linewidth=1.0)
    
ax.set_title(r'$q=%d,d_f=%s$'%(100*quantile,Df),fontsize='x-small')
#fig.savefig('%s/densityMaps.png'%DIR_OUTPT_figs,bbox_inches='tight',dpi=150)
#---
# mpl.rcParams.update(mpl.rcParamsDefault)
# warnings.filterwarnings('ignore') #--- get rid of warnings
# rc('text', usetex=True)
# font = {'size'   : 20}
# matplotlib.rc('font', **font)
plt.show()
n_th=1.0e-02+0.0e+00
In [38]:
# #--- triggering part of nij
# nij_trig=nij[nij['T*']*nij['R*']<=n_thresh]
# df_triggered = swarm_lohi.iloc[nij_trig['Event_id']]

# #--- background part of nij
# nij_background=nij[nij['T*']*nij['R*']>n_thresh]
# nn = len(nij_background)
# nij_background = nij_background.append(nij_removed,verify_integrity=False) #--- append removed part
# assert len(nij_background) == nn + len(nij_removed)

# #--- background events
# background_child = list(set(nij_background['Event_id'])) #--- children
# #
# nij_copy = nij.copy()
# Nij = nij_copy.append(nij_removed)
# background_root  = list(set(Nij['Parent_id'])-set(Nij['Event_id'])) #--- root
# #
# background = background_root+background_child #--- total
# assert len(background) == len(set(background)) #--- assert no overlap
# #
# df_background = swarm_lohi.iloc[background]

# assert len(df_triggered) + len(df_background) == len(swarm_lohi), '%s != %s' %(len(df_triggered) + len(df_background),len(swarm_lohi))


# #display(nij_trig.info())
# trig_ratio = 1.0*len(df_triggered) / len(swarm_lohi) #cdf_actual[edges_actual[:-1]<=n_thresh][-1]
# print 'trig_ratio=%s'%trig_ratio

repeat for fake catalog

In [39]:
#--- triggering part of nij
nij = Nij_rnd
nij_trig=nij[nij['T*']*nij['R*']<=n_thresh]
df_triggered = swarm_shuffled.iloc[nij_trig['Event_id']]

#--- background part of nij
nij_background=nij[nij['T*']*nij['R*']>n_thresh]
#nn = len(nij_background)
#nij_background = nij_background.append(nij_removed,verify_integrity=False) #--- append removed part
#assert len(nij_background) == nn + len(nij_removed)

#--- background events
background_child = list(set(nij_background['Event_id'])) #--- children
#
nij_copy = nij.copy()
Nij = nij_copy.append(nij_removed)
background_root  = list(set(Nij['Parent_id'])-set(Nij['Event_id'])) #--- root
#
background = background_root+background_child #--- total
assert len(background) == len(set(background)) #--- assert no overlap
#
df_background = swarm_shuffled.iloc[background]

assert len(df_triggered) + len(df_background) == len(swarm_shuffled), '%s != %s' %(len(df_triggered) + len(df_background),len(swarm_shuffled))


#display(nij_trig.info())
trig_ratio = 1.0*len(df_triggered) / len(swarm_shuffled) #cdf_actual[edges_actual[:-1]<=n_thresh][-1]
print 'trig_ratio=%s'%trig_ratio
trig_ratio=0.247379454927

Distributions of $n_{ij}$

In [40]:
#--- histogram
nn=np.log10(nij['R*']*nij['T*'])
nmin=nn.min()
nmax=nn.max()
bins=np.linspace(nmin,nmax,int(nmax-nmin)*8)
hist, edge = np.histogram(nn,bins=bins,normed=True)

#--- accumulated histogram
slist=np.array(nn)
slist.sort()
N = len(slist)
d = histogramACCUMLTD( slist.tolist() )
keys=d.keys()
keys.sort()

xx=[];yy=[]
for ikey in keys:
    xx.append(d[ikey][0])
    yy.append(d[ikey][2])

#------------
#--- plot
#------------    
fig= plt.figure(figsize=(4,4))
ax = fig.add_subplot(111)
ax.set_xlabel(r'$log n_{ij}$')
ax.set_ylabel(r'$P(log n_{ij})$')
(nmin,nmax) = (min(slist),max(slist))
ax.set_xlim(floor(nmin),ceil(nmax))
#ax.set_xlim(-7,1)
#ax.set_ylim(0,0.6)
#ax.set_xscale('log')
#ax.set_yscale('log')
ax.plot(edge[:-1],hist,'-o',color='black')

# ax2 = ax.twinx()
# ax2.plot(xx,yy,
#         linestyle='-', drawstyle='steps-post',color='red',
#         linewidth=1.0) #--- accumulated
# #ax2.set_xlim(-7,1)
# #ax2.set_ylim(0,1200)

# ax2.tick_params(axis='y',colors='red')
# ax2.set_ylabel('$N(<n_{ij})$',color='red')

np.save('actual',np.c_[edge[:-1],hist])
np.save('acc_actual',np.c_[xx,yy])


#fig.savefig('%s/pn_actual.png'%DIR_OUTPT_figs,bbox_inches='tight')
plt.show()

Distributions of random 𝑛𝑖𝑗

In [41]:
#--- histogram
nn=np.log10(Nij_rnd['R*']*Nij_rnd['T*'])
nmin=nn.min()
nmax=nn.max()
bins=np.linspace(nmin,nmax,int(nmax-nmin)*8)
hist, edge = np.histogram(nn,bins=bins,normed=True)

#--- accumulated histogram
slist=np.array(nn)
slist.sort()
N = len(slist)
d = histogramACCUMLTD( slist.tolist() )
keys=d.keys()
keys.sort()

xx=[];yy=[]
for ikey in keys:
    xx.append(d[ikey][0])
    yy.append(d[ikey][2])

#------------
#--- plot
#------------    
fig= plt.figure(figsize=(4,4))
ax = fig.add_subplot(111)
ax.set_xlabel(r'$log n_{ij}$')
ax.set_ylabel(r'$P(log n_{ij})$')
(nmin,nmax) = (min(slist),max(slist))
ax.set_xlim(floor(nmin),ceil(nmax))
#ax.set_xlim(-7,1)
#ax.set_ylim(0,0.6)
#ax.set_xscale('log')
#ax.set_yscale('log')
ax.plot(edge[:-1],hist,'-o',color='black')

# ax2 = ax.twinx()
# ax2.plot(xx,yy,
#         linestyle='-', drawstyle='steps-post',color='red',
#         linewidth=1.0) #--- accumulated
# #ax2.set_xlim(-7,1)
# #ax2.set_ylim(0,1200)

# ax2.tick_params(axis='y',colors='red')
# ax2.set_ylabel('$N(<n_{ij})$',color='red')

np.save('random',np.c_[edge[:-1],hist])
np.save('acc_random',np.c_[xx,yy])


#fig.savefig('%s/pn_shuffled.png'%DIR_OUTPT_figs,bbox_inches='tight')
plt.show()
In [42]:
act = np.load('actual.npy')
rnd = np.load('random.npy')
acc_act = np.load('acc_actual.npy')
acc_rnd = np.load('acc_random.npy')

#------------
#--- plot
#------------    
fig= plt.figure(figsize=(4,4))
ax = fig.add_subplot(111)
ax.set_xlabel(r'$log n_{ij}$')
ax.set_ylabel(r'$P(log n_{ij})$')
(nmin,nmax) = (min(slist),max(slist))
ax.set_xlim(floor(nmin),ceil(nmax))
#ax.set_title('HypoDD')
# ax.set_xlim(-7,1)
# ax.set_ylim(0,0.6)
#ax.set_xscale('log')
#ax.set_yscale('log')
ax.plot(act[:,0],act[:,1],'-o',color='black',label='Actual')
ax.plot(rnd[:,0],rnd[:,1],'-s',color='red',label='Shuffled')
ax.plot([np.log10(n_thresh),np.log10(n_thresh)],[0,0.3])
ax.legend(fontsize='xx-small')

# ax2 = ax.twinx()
# ax2.plot(acc_act[:,0],acc_act[:,1],
#         linestyle='-', drawstyle='steps-post',color='black',
#         linewidth=1.0) #--- accumulated
# ax2.plot(acc_rnd[:,0],acc_rnd[:,1],
#         linestyle='-', drawstyle='steps-post',color='red',
#         linewidth=1.0) #--- accumulated
# # ax2.set_xlim(-7,1)
# # ax2.set_ylim(0,1200)

# ax2.tick_params(axis='y',colors='red')
# ax2.set_ylabel('$N(<n_{ij})$',color='red')


#fig.savefig('%s/pn_actual_shuffled.png'%DIR_OUTPT_figs,bbox_inches='tight',dpi=75)

plt.show()

plot moment tensor and triggered events

In [43]:
from math import *
import numpy as np
import pandas as pd

def arrows(ax,(xc,yc),n1,n3,rr):
    [x1,y1]=np.array([xc,yc])+rr*n3[:2]
    [xo,yo]=np.array([xc,yc])+.1*rr*n3[:2]
    ax.annotate("", (xo,yo), xytext=(x1, y1),
                     textcoords='data',clip_on=True,
                     arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="red",linewidth="0.3")) 
    [x1,y1]=np.array([xc,yc])-rr*n3[:2]
    [xo,yo]=np.array([xc,yc])-.1*rr*n3[:2]
    ax.annotate("", (xo,yo), xytext=(x1, y1),
                     textcoords='data', clip_on=True,
                     arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="red",linewidth="0.3")) 
    
    [x1,y1]=np.array([xc,yc])+rr*n1[:2]
    [xo,yo]=np.array([xc,yc])+.1*rr*n1[:2]
    ax.annotate("", (x1,y1), xytext=(xo, yo),
                     textcoords='data', clip_on=True,
                     arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="black",linewidth="0.3")) 
    [x1,y1]=np.array([xc,yc])-rr*n1[:2]
    [xo,yo]=np.array([xc,yc])-.1*rr*n1[:2]
    ax.annotate("", (x1,y1), xytext=(xo, yo),
                     textcoords='data', clip_on=True,
                 arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="black",linewidth="0.3"))     

    
def getN(phi_az,alpha):
    phi_az *= pi/180
    alpha *= pi/180    
#    return np.array([cos(alpha)*cos(phi_az), cos(alpha)*sin(phi_az), sin(alpha) ])
    return np.array([cos(alpha)*sin(phi_az), cos(alpha)*cos(phi_az), sin(alpha) ]) #--- azimuth with respect to north

def tensProd(n1,n2):
    n=3
    return np.array([[n1[i]*n2[j] for j in xrange(3)] for i in xrange(3)])

def arrows(ax,(xc,yc),n1,n3,rr):
    [x1,y1]=np.array([xc,yc])+rr*n3[:2]
    [xo,yo]=np.array([xc,yc])+.1*rr*n3[:2]
    ax.annotate("", (xo,yo), xytext=(x1, y1),
                     textcoords='data',clip_on=True,
                     arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="red",linewidth="0.3")) 
    [x1,y1]=np.array([xc,yc])-rr*n3[:2]
    [xo,yo]=np.array([xc,yc])-.1*rr*n3[:2]
    ax.annotate("", (xo,yo), xytext=(x1, y1),
                     textcoords='data', clip_on=True,
                     arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="red",linewidth="0.3")) 
    
    [x1,y1]=np.array([xc,yc])+rr*n1[:2]
    [xo,yo]=np.array([xc,yc])+.1*rr*n1[:2]
    ax.annotate("", (x1,y1), xytext=(xo, yo),
                     textcoords='data', clip_on=True,
                     arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="black",linewidth="0.3")) 
    [x1,y1]=np.array([xc,yc])-rr*n1[:2]
    [xo,yo]=np.array([xc,yc])-.1*rr*n1[:2]
    ax.annotate("", (x1,y1), xytext=(xo, yo),
                     textcoords='data', clip_on=True,
                 arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="black",linewidth="0.3"))    
    
    
def PlotMomentTensor(trigger_list, swarm_lohi, nij_trig, nmin=30 ):
    for triggerID, xx in zip(trigger_list,xrange(10000)):
        try:
            #--- list of children
            index_trigger = swarm_lohi[swarm_lohi['Event ID']==triggerID].index[0]
            children_index_list = nij_trig[nij_trig['Parent_id']==index_trigger]['Event_id']
            if len(children_index_list) < nmin: 
                continue
            #--- plot moment tensor
            #         #--- parse moment tensor
    #        df_trigger = df_moment[df_moment['Event ID'] == triggerID]
            df_trigger = swarm_lohi[swarm_lohi['Event ID'] == triggerID]
            
            n1 = getN(df_trigger['Azimuth(T)'],df_trigger['Plunge(T)']) #--- principal directions (azimuth, plunge)
            n3 = getN(df_trigger['Azimuth(P)'],df_trigger['Plunge(P)'])
            n2 = np.cross(n3,n1) #getN(df_trigger['Azimuth(N)'],df_trigger['Plunge(N)'])
            if np.isnan(n1[0]):
                continue
            #--- project t, p
            n1 -= n1[2] * np.array([0.0,0.0,1.0]) 
            n3 -= n3[2] * np.array([0.0,0.0,1.0]) 
            
            #--- plot events
            fig = plt.figure(figsize=(4,4))
            ax = fig.add_subplot(111)
    
            x = swarm_lohi.iloc[children_index_list]['longitude']
            y = swarm_lohi.iloc[children_index_list]['latitude']
            mag = swarm_lohi.iloc[children_index_list]['magnitude']
            xc = swarm_lohi.iloc[index_trigger]['longitude']
            yc = swarm_lohi.iloc[index_trigger]['latitude']
            mag_c = swarm_lohi.iloc[index_trigger]['magnitude']
            ax.scatter(x,y,s=np.exp(mag),alpha=0.2)
            ax.scatter(xc,yc,s=np.exp(mag_c),marker='*',facecolors='yellow',color='black')
            ax.set_title(r'$m=%2.1f$'%mag_c,fontsize='small')

            #--- draw planes
            dd=max(x.max()-x.min(),y.max()-y.min())           
            arrows(ax,(xc,yc),n1, n3, dd/3)
                    
            ax.set_xlim(xc-dd/2,xc+dd/2)    
            ax.set_ylim(yc-dd/2,yc+dd/2)    
            
#            fig.savefig('%s/spatialMap%s.png'%(DIR_OUTPT_figs,triggerID),bbox_inches='tight',dpi=150)
        except:
            continue
    #    print (xc,yc),(x1, y1)

        plt.show()
                   

                   
#trigger_list = set(swarm_lohi.iloc[nij_trig['Parent_id']]['Event ID'])
trigger_list = set(swarm_shuffled.iloc[nij_trig['Parent_id']]['Event ID'])

#PlotMomentTensor(trigger_list, swarm_lohi, nij_trig, nmin=30 )
PlotMomentTensor(trigger_list, swarm_shuffled, nij_trig, nmin=10 )

transform the frame

In [44]:
# #trigger_list = set(swarm_lohi.iloc[nij_trig['Parent_id']]['Event ID']) & set(df_moment['Event ID'])


# fig = plt.figure(figsize=(4,4))
# ax = fig.add_subplot(111)
# for triggerID in trigger_list:
    
#     #--- parse moment tensor
# #    df_trigger = df_moment[df_moment['Event ID'] == triggerID]
#     df_trigger = swarm_lohi[swarm_lohi['Event ID'] == triggerID]
        
#     n1 = getN(df_trigger['Azimuth(T)'],df_trigger['Plunge(T)']) #--- principal directions (azimuth, plunge)
#     n3 = getN(df_trigger['Azimuth(P)'],df_trigger['Plunge(P)'])
#     n2 = np.cross(n3,n1) #getN(df_trigger['Azimuth(N)'],df_trigger['Plunge(N)'])
#     if np.isnan(n1[0]):
#             continue
            
#     Q=np.c_[n1,n3,n2].T #--- transformation 
    
#     #--- list of children
#     index_trigger = swarm_lohi[swarm_lohi['Event ID']==triggerID].index[0]
#     children_index_list = nij_trig[nij_trig['Parent_id']==index_trigger]['Event_id']
#     #--- plot events
    
#     x = swarm_lohi.iloc[children_index_list]['x(km)']
#     y = swarm_lohi.iloc[children_index_list]['y(km)']
#     z = swarm_lohi.iloc[children_index_list]['z(km)']
#     xc = swarm_lohi.iloc[index_trigger]['x(km)']
#     yc = swarm_lohi.iloc[index_trigger]['y(km)']
#     zc = swarm_lohi.iloc[index_trigger]['z(km)']
#     mag = swarm_lohi.iloc[children_index_list]['magnitude']
#     mag_c = swarm_lohi.iloc[index_trigger]['magnitude']    

#     r_local = np.matmul(Q,np.c_[x-xc,y-yc,z-zc].T).T #Transform
    
#     ax.scatter(r_local[:,0],r_local[:,1],c='C0',alpha=0.1,s=np.exp(mag))
# #    ax.scatter(xc,yc,color='yellow')
#     #--- plot moment tensor

    
#     #--- project t, p
#     n1 = np.array([1.0,0.0]) 
#     n3 = np.array([0.0,1.0]) 
#     rr=4 #.2*(r_local[:,0].max()-r_local[:,0].min())
    
# #--- draw axis
# xc = 0.0
# yc = 0.0
# zc = 0.0
# [x1,y1]=np.array([xc,yc])+rr*n3[:2]
# ax.annotate("", (xc,yc), xytext=(x1, y1),
#                  textcoords='data',clip_on=True,
#                  arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="red",linewidth="0.3")) 
# [x1,y1]=np.array([xc,yc])-rr*n3[:2]
# ax.annotate("", (xc,yc), xytext=(x1, y1),
#                  textcoords='data',clip_on=True,
#                  arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="red",linewidth="0.3")) 

# [x1,y1]=np.array([xc,yc])+rr*n1[:2]
# ax.annotate("", (x1,y1), xytext=(xc, yc),
#                  textcoords='data',clip_on=True,
#                  arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="black",linewidth="0.3")) 
# [x1,y1]=np.array([xc,yc])-rr*n1[:2]
# ax.annotate("", (x1,y1), xytext=(xc, yc),
#                  textcoords='data',clip_on=True,
#                  arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.4",color="black",linewidth="0.3")) 

# dd=20 #max(r_local[:,0].max()-r_local[:,0].min(),r_local[:,1].max()-r_local[:,1].min())
# ax.set_xlim(xc-dd/2,xc+dd/2)    
# ax.set_ylim(yc-dd/2,yc+dd/2)    
# ax.set_xlabel(r'$x(km)$')
# ax.set_ylabel(r'$y(km)$')
# #fig.savefig('%s/spatialMap.png'%(DIR_OUTPT_figs),bbox_inches='tight',dpi=150)

# plt.show()

magnitude bins

In [45]:
def MagBins( trigger_list, swarm_lohi, nij_trig, m):
    r_local_m_bins = {}
    mag_m_bins = {}
    for triggerID in trigger_list:
        #--- parse moment tensor
    #    df_trigger = df_moment[df_moment['Event ID'] == triggerID]
        df_trigger = swarm_lohi[swarm_lohi['Event ID'] == triggerID]
            
        n1 = getN(df_trigger['Azimuth(T)'],df_trigger['Plunge(T)']) #--- principal directions (azimuth, plunge)
        n3 = getN(df_trigger['Azimuth(P)'],df_trigger['Plunge(P)'])
        n2 = np.cross(n3,n1) #getN(df_trigger['Azimuth(N)'],df_trigger['Plunge(N)'])
        if np.isnan(n1[0]):
                continue
        magnitude_indx = m.searchsorted(df_trigger['magnitude'].iloc[0])
        
        
        Q=np.c_[n1,n3,n2].T #--- transformation 
        
        #--- list of children
        index_trigger = swarm_lohi[swarm_lohi['Event ID']==triggerID].index[0]
        children_index_list = nij_trig[nij_trig['Parent_id']==index_trigger]['Event_id']
        #--- plot events
        
        x = swarm_lohi.iloc[children_index_list]['x(km)']
        y = swarm_lohi.iloc[children_index_list]['y(km)']
        z = swarm_lohi.iloc[children_index_list]['z(km)']
        xc = swarm_lohi.iloc[index_trigger]['x(km)']
        yc = swarm_lohi.iloc[index_trigger]['y(km)']
        zc = swarm_lohi.iloc[index_trigger]['z(km)']
        mag = swarm_lohi.iloc[children_index_list]['magnitude']
        mag_c = swarm_lohi.iloc[index_trigger]['magnitude']    
        
        r_local = np.matmul(Q,np.c_[x-xc,y-yc,z-zc].T).T #Transform
        if magnitude_indx in r_local_m_bins:
    #        print r_local_m_bins[ magnitude_indx ].shape, r_local.shape
            r_local_m_bins[ magnitude_indx ] = np.concatenate((r_local_m_bins[ magnitude_indx] ,r_local),axis=0)
            mag_m_bins[ magnitude_indx ] = np.concatenate((mag_m_bins[ magnitude_indx] ,mag),axis=0)
        else:
            r_local_m_bins[ magnitude_indx ] = r_local[:,:]
            mag_m_bins[ magnitude_indx ] = mag.copy()
    
            
    xc = yc = 0.0
    for magnitude_indx in r_local_m_bins:
        fig = plt.figure(figsize=(4,4))
        ax = fig.add_subplot(111)
        r_local = r_local_m_bins[ magnitude_indx ]
        mag = mag_m_bins[ magnitude_indx ]
    #    print r_local.shape
        ax.scatter(r_local[:,0],r_local[:,1],c='C0',alpha=0.1,s=np.exp(mag))
        ax.set_title(r'$m=%2.1f$'%m[magnitude_indx],fontsize='small')
        dd = 2*max(np.std(r_local[:,0]),np.std(r_local[:,1])) #np.abs(r_local[:,0]).max(),np.abs(r_local[:,1]).max())/4
        ax.set_xlim(xc-dd,xc+dd)    
        ax.set_ylim(yc-dd,yc+dd)
        ax.set_xlabel(r'$x(km)$')
        ax.set_ylabel(r'$y(km)$')
        arrows(ax,(xc,yc),np.array([1.0,0.0]),np.array([0.0,1.0]),dd/3)
        
#        fig.savefig('map%s.png'%magnitude_indx,dpi=75,bbox_inches='tight')
    
    plt.show()
    return r_local_m_bins
    

    
m=np.array([1.999999,
 2.99999925,
 3.9999995000000004,
 4.99999975,
 6.0,
 6.41,
 7.09,
 7.109999999999999])

#r_local_m_bins = MagBins( trigger_list, swarm_lohi, nij_trig, m)
r_local_m_bins = MagBins( trigger_list, swarm_shuffled, nij_trig, m)

interpolation

In [46]:
from scipy import stats

def interpolation(r_local_m_bins):
    xc = yc = 0.0
    for magnitude_indx in r_local_m_bins:
        try:
            #--- interpolate
            r_local = r_local_m_bins[ magnitude_indx ]
            dd = 2*max(np.std(r_local[:,0]),np.std(r_local[:,1])) #np.abs(r_local[:,0]).max(),np.abs(r_local[:,1]).max())/4
            ymin = xmin = -dd
            ymax = xmax = +dd
            X, Y = np.mgrid[ xmin:xmax:200j, ymin:ymax:200j]
            positions = np.vstack([X.ravel(), Y.ravel()])
            values = np.vstack([r_local[:,0], r_local[:,1]])
            kernel = stats.gaussian_kde(values)
            kernel.set_bandwidth(bw_method=kernel.factor / 10.) #--- smoothing length
            Z = np.reshape(kernel(positions).T, X.shape)
            #--- plot
            fig = plt.figure(figsize=(4,4))
            ax = fig.add_subplot(111)
            ax.imshow(np.rot90(Z), cmap='coolwarm', #plt.cm.gist_earth_r,
                      extent=[xmin, xmax, ymin, ymax])
            ax.plot(r_local[:,0], r_local[:,1], 'k.', markersize=1)
            ax.set_xlim(xc-dd/2,xc+dd/2)    
            ax.set_ylim(yc-dd/2,yc+dd/2)
            ax.set_xlabel(r'$x(km)$',fontsize='small')
            ax.set_ylabel(r'$y(km)$',fontsize='small')
            ax.set_title(r'$m=%2.1f$'%m[magnitude_indx],fontsize='small')
            arrows(ax,(xc,yc),np.array([1.0,0.0]),np.array([0.0,1.0]),dd/6)
        except:
            continue
    plt.show()
    
    
interpolation(r_local_m_bins)  

Correlations

In [47]:
def Correlations(r_local_m_bins):
    xc = yc = 0.0
    for magnitude_indx in r_local_m_bins:
        try:
        #--- interpolate
            r_local = r_local_m_bins[ magnitude_indx ]
            dd = max(np.std(r_local[:,0]),np.std(r_local[:,1])) #np.abs(r_local[:,0]).max(),np.abs(r_local[:,1]).max())/4
            ymin = xmin = -dd
            ymax = xmax = +dd
            X, Y = np.mgrid[ xmin:xmax:201j, ymin:ymax:201j]
            positions = np.vstack([X.ravel(), Y.ravel()])
            values = np.vstack([r_local[:,0], r_local[:,1]])
            kernel = stats.gaussian_kde(values)
            kernel.set_bandwidth(bw_method=kernel.factor / 10.) #--- smoothing length
            Z = np.reshape(kernel(positions).T, X.shape)
            #--- fft
            Z -= Z.mean()
            Z /= Z.std()
            Z=np.rot90(Z)
            Zq = np.abs(np.fft.fft2(Z))**2
            zfield = np.fft.ifft2(Zq).real
            zfield = np.fft.ifftshift(zfield) / (zfield.shape[0]*zfield.shape[1])
            #--- plot
            fig = plt.figure(figsize=(4,4))
            ax = fig.add_subplot(111)
            pos = ax.imshow(zfield,cmap='bwr',extent=[xmin, xmax, ymin, ymax]) #,vmin=-0.5, vmax=.5)
            ax.contour( zfield, np.arange(zfield.min(),zfield.max(),0.15) ,
                       extent=[xmin, xmax, ymin, ymax],
                      origin='image',colors='black',linewidths=0.2)
#            fig.colorbar( pos, fraction = 0.04) #, format = '%2.1f' )
            ax.set_xlim(xc-dd/2,xc+dd/2)    
            ax.set_ylim(yc-dd/2,yc+dd/2)
#            ax.set_xlabel(r'$x(km)$')
#            ax.set_ylabel(r'$y(km)$')
            ax.set_title(r'$m=%2.1f, n_{th}=%2.1e$'%(m[magnitude_indx],n_thresh),fontsize='small')
            arrows(ax,(xc,yc),np.array([1.0,0.0]),np.array([0.0,1.0]),dd/3)
            fig.savefig('corr_map%s.png'%magnitude_indx,dpi=.55*75,bbox_inches='tight')
        except:
            continue
    plt.show()
    
    
Correlations(r_local_m_bins)
    
    

quantify anisotropy

In [48]:
xc = yc = 0.0
for magnitude_indx in [7]: #r_local_m_bins:
    #--- interpolate
    r_local = r_local_m_bins[ magnitude_indx ]
    dd = max(np.abs(r_local[:,0]).max(),np.abs(r_local[:,1]).max())
    ymin = xmin = -dd
    ymax = xmax = +dd
    X, Y = np.mgrid[ xmin:xmax:401j, ymin:ymax:401j]
    positions = np.vstack([X.ravel(), Y.ravel()])
    values = np.vstack([r_local[:,0], r_local[:,1]])
    kernel = stats.gaussian_kde(values)
    kernel.set_bandwidth(bw_method=kernel.factor / 10.) #--- smoothing length
    Z = np.reshape(kernel(positions).T, X.shape)
    #--- fft
    Z -= Z.mean()
    Z /= Z.std()
    Z=np.rot90(Z)
    Zq = np.abs(np.fft.fft2(Z))**2
    zfield = np.fft.ifft2(Zq).real
    zfield = np.fft.ifftshift(zfield) / (zfield.shape[0]*zfield.shape[1])
    #--- plot
    fig = plt.figure(figsize=(4,4))
    ax = fig.add_subplot(111)
    pos = ax.imshow(zfield,cmap='bwr',extent=[xmin, xmax, ymin, ymax]) #,vmin=-0.5, vmax=.5)
    ax.contour( zfield, np.arange(zfield.min(),zfield.max(),0.15) ,
               extent=[xmin, xmax, ymin, ymax],
              origin='image',colors='black',linewidths=0.2)
    fig.colorbar( pos, fraction = 0.04) #, format = '%2.1f' )
    dd = max(np.std(r_local[:,0]),np.std(r_local[:,1]))*2
    ax.set_xlim(xc-dd/2,xc+dd/2)    
    ax.set_ylim(yc-dd/2,yc+dd/2)
    ax.set_xlabel(r'$x(km)$')
    ax.set_ylabel(r'$y(km)$')
    ax.set_title(r'$m=%2.1f$'%m[magnitude_indx],fontsize='small')
    arrows(ax,(xc,yc),np.array([1.0,0.0]),np.array([0.0,1.0]),dd/3)
#    fig.savefig('corr_map%s.png'%magnitude_indx,dpi=75,bbox_inches='tight')
plt.show()

KeyErrorTraceback (most recent call last)
<ipython-input-48-3088d9033503> in <module>()
      2 for magnitude_indx in [7]: #r_local_m_bins:
      3     #--- interpolate
----> 4     r_local = r_local_m_bins[ magnitude_indx ]
      5     dd = max(np.abs(r_local[:,0]).max(),np.abs(r_local[:,1]).max())
      6     ymin = xmin = -dd

KeyError: 7
In [49]:
R=[1,2,4]

fig = plt.figure(figsize=(4,4))
ax = fig.add_subplot(111)
ax.set_xlabel(r'$\theta$')
ax.set_ylabel(r'$c(\theta)$')

rsq = X*X+Y*Y
for r in R:
    dr=0.1*r
    indices=np.all([rsq<(r+dr)**2, rsq>(r-dr)**2], axis=0)
    xx=Y[indices]
    yy=-X[indices]
    theta=180*np.arctan2(yy,xx)/pi
    rho = zfield[indices]
    rho = rho[theta>=0.0]
    rho /= (np.abs(rho)).max()
    theta = theta[theta>=0.0]
    ax.scatter(theta,rho,label=r'$r=%3.2f$'%r,s=12)
ax.legend(fontsize='x-small')


plt.show()

NameErrorTraceback (most recent call last)
<ipython-input-49-41a77a69ea0c> in <module>()
      6 ax.set_ylabel(r'$c(\theta)$')
      7 
----> 8 rsq = X*X+Y*Y
      9 for r in R:
     10     dr=0.1*r

NameError: name 'X' is not defined
In [50]:
THETA=[45.0, 90.0, 135.0]

fig = plt.figure(figsize=(4,4))
ax = fig.add_subplot(111)
ax.set_xlabel(r'$r$')
ax.set_ylabel(r'$c(r)$')
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_ylim(1e-2,1)

for theta in THETA:
    dt=0.01*theta
    xx=Y
    yy=-X
    indices=np.abs(np.angle(xx+1j*yy,deg=True)-theta) < dt

    xx=xx[indices]
    yy=yy[indices]

    rr=np.sqrt(xx*xx+yy*yy)
    rho = zfield[indices]

    ax.plot(rr,rho,label=r'$\theta=%3.1f$'%theta)
ax.legend(fontsize='x-small')

plt.show()

NameErrorTraceback (most recent call last)
<ipython-input-50-b2161a07e05f> in <module>()
     11 for theta in THETA:
     12     dt=0.01*theta
---> 13     xx=Y
     14     yy=-X
     15     indices=np.abs(np.angle(xx+1j*yy,deg=True)-theta) < dt

NameError: name 'Y' is not defined

plot moment tensor and triggered events

In [51]:
#--- density plots: rho(r)
#--- split based on the mainshock's magnitude

import numpy as np
import math



def AddTime( df_trig, df_complete ):
    prefact = 1.0 / ( 24.0 * 60 * 60 ) #--- daily
    x=df_complete.loc[ df_trig['Event_id'] ]
    y=df_complete.loc[ df_trig['Parent_id'] ]
    x.reset_index(inplace=True)
    y.reset_index(inplace=True)
    df_trig['T'] = x['date']-y['date']
    df_trig['T'] = df_trig['T'].apply(lambda x: x.total_seconds() * prefact)

def RemovePair( df, cs ):
    return df [ df[ 'R' ] <= df[ 'T' ] * cs ]


Mlist=m=[1.999999,
 2.99999925,
 3.9999995000000004,
 4.99999975,
 6.0,
 6.41,
 7.09,
 7.109999999999999]

#--- choose m0<m<m1
X={}
rho_dict={}
m_range={}
Err={}
m_mean={}
for i in xrange( len(m) - 1 ):
    try:
        m0 = m[ i ]
        m1 = m[ i + 1 ]
        
        df_trig, m_mean[ i ],junk = GetPairsWithSpecifiedParentMag((m0,m1),swarm_lohi,nij_trig) #--- get parent with m0<m<m1
        df_trig.reset_index( inplace = True, drop = True )
        
        #--- rho plots
        n_decades = int( math.ceil( log(df_trig['R'].max()/df_trig['R'].min(),10) ) )
        nbin_per_decade = 4
        nbins =  nbin_per_decade * n_decades
        
        bins = np.logspace(log(df_trig['R'].min(),10)-1.e-10,
                                                  log(df_trig['R'].max(),10)+1.e-10,nbins)
        bins = AdjustBins(df_trig['R'],bins)
    
        rho, xedges=np.histogram(df_trig['R'], 
                                 density=True,
                                 bins=bins)
        hist, xedges=np.histogram(df_trig['R'],
                                  bins=bins)
        x_bins, junk=np.histogram(df_trig['R'], #--- histogram
                                  bins=bins,
                                 weights = df_trig['R'])
    
        #--- rm empty bins
        x_bins = x_bins[ hist != 0 ]
        rho = rho[ hist != 0 ]
        hist = hist[ hist != 0 ]
        
        #--- poisson err 
        err = rho/np.sqrt(hist)
        x_bins /= hist
                
        #--- scattered plot
        fig=plt.figure(figsize=(8,4))
        ax=fig.add_subplot(1,2,1)
        ax.set_title('$%2.1f<m<%2.1f$'%(m0,m1))
        ax.set_xscale('log')
        ax.set_yscale('log')
        ax.set_xlabel('$t$')
        ax.set_ylabel('$r$')
        ax.set_xlim(1e-5,10)
        ax.set_ylim(1e-4,1e2)
        ax.scatter(df_trig['T'],df_trig['R'],alpha=0.1)
        
        #--- discretization effects
#         dt_min=df_trig['T'].min()
#         dt_max=df_trig['T'].max()
#         dr_min=df_trig['R'].min()
#         dr_max=df_trig['R'].max()
        
#         ax.plot([dt_min,dt_min],[dr_min,dr_max],
#                  '-',color='blue')
#         ax.plot([dt_max,dt_max],[dr_min,dr_max],
#                  '-',color='blue')
#         ax.plot([dt_min,dt_max],[dr_min,dr_min],
#                  '-',color='blue')
#         ax.plot([dt_min,dt_max],[dr_max,dr_max],
#                  '-',color='blue')
        
#         ax.set_ylim(dr_min/2,dr_max*2)
#         ax.plot([dt_min,dt_max],[dt_min*cs,dt_max*cs],
#                  '-',color='red')
        
        #--- rho
        ax=fig.add_subplot(1,2,2)
        ax.set_xlim(1e-4,1e2)
        ax.set_ylim(1e-5,1e1)
        ax.set_xscale('log')
        ax.set_yscale('log')
        ax.set_xlabel('$r$')
        ax.set_ylabel('$P(r)$')
        ax.errorbar(x_bins,rho, yerr=err)
            
        X[i] = x_bins[:]
        rho_dict[i]=rho[:]
        m_range[i]=np.array([m0,m1])
        Err[i]=err[:]
#        plt.savefig('%s/rho_r.%s.png'%(DIR_OUTPT_figs,i),dpi=50,bbox_inches='tight')
    except:
        continue

plt.show()

Declustered Catalog

In [52]:
df_triggered.head()
Out[52]:
Event ID date magnitude latitude longitude depth Azimuth(T) Plunge(T) Azimuth(P) Plunge(P) x(km) y(km) z(km)
6 38443215 2019-07-04 17:39:35.360 2.61 35.688 -117.493 11.0 NaN NaN NaN NaN 2.894307 42.273296 2.7
7 38443223 2019-07-04 17:40:02.250 2.59 35.725 -117.571 6.6 NaN NaN NaN NaN 16.363965 28.958583 7.6
9 37421957 2019-07-04 17:41:20.850 4.23 35.727 -117.529 2.0 NaN NaN NaN NaN 8.794240 36.059730 2.8
13 38443263 2019-07-04 17:46:03.710 2.00 35.643 -117.610 10.1 NaN NaN NaN NaN 9.350837 39.832246 6.4
19 38443311 2019-07-04 17:57:31.460 2.31 35.660 -117.522 2.3 307.0 12.0 38.0 8.0 6.679169 40.719899 7.9
In [53]:
#--- plot complete catalog

def ConvertDailyRate(hist, bin_edges ):
#---convert data to daily rate     
    t0 = pd.to_datetime( bin_edges[ 0 ] )
    t1 = pd.to_datetime( bin_edges[ 1 ] )
    delta_t = ( t1 - t0 ).total_seconds() / ( 60 * 60 * 24)
    hist *= ( bin_edges[ 1 ] - bin_edges[ 0 ] ) / delta_t

def ActivityRate( df ):
    nbins = int( (df['date'].max()-df['date'].min()).days / 2 ) #--- number of bins
    
    tmax = df['date'].max().value #--- min/max
    tmin = df['date'].min().value
    
    hist, bin_edges = np.histogram(df['date'].apply(lambda x:x.value),                                   
                    bins=np.linspace(tmin,tmax,nbins+1,endpoint=True),density=True) #--- probability dist.
    hist *= len( df['date'] ) #--- int(hist).dt=n
    cumm_number = np.cumsum(hist)*(bin_edges[1]-bin_edges[0]) #--- accumulated number
    ConvertDailyRate( hist, bin_edges ) #--- daily activity
    return bin_edges, hist, cumm_number
    
class Tlohi:
    def __init__(self,lo,hi):
        self.lo = lo
        self.hi = hi
        
#---------------------------------------------------------------------------------
#-----------------
#-----------------
#-----------------
#---------------------------------------------------------------------------------
#--- set min/max time to avoid temporal incompletenesss issue
df_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
df_lohi = GetCartesian( df_lohi )
df_lohi.reset_index(inplace=True,drop=True)

#--- activity rate
bin_edges, hist, cumm_number = ActivityRate( df_lohi )

#--- temporal map
fig = plt.figure(figsize=(8,9))#,dpi=150)
ax1=fig.add_subplot(311)
ax2=fig.add_subplot(312)
ax3=fig.add_subplot(313)
ax1.set_ylabel('$M$')
ax1.set_xlim(df_lohi['date'].min(),df_lohi['date'].max())
ax1.tick_params(axis='both',labelsize=18)
ax2.set_ylabel('$M$')
ax2.set_xlim(df_lohi['date'].min(),df_lohi['date'].max())
ax2.tick_params(axis='both',labelsize=18)
ax3.set_ylabel('$M$')
ax3.set_xlim(df_lohi['date'].min(),df_lohi['date'].max())
ax3.tick_params(axis='both',labelsize=18)
#ax1.set_ylim(mc,2.5)
#ax1.set_xlim(pd.to_datetime('2019-03-01'),pd.to_datetime('2019-03-30'))
#
# ax1R = ax1.twinx()  # instantiate a second axes that shares the same x-axis
# ax1R.set_ylabel('daily activity rate',color='red')
# ax1R.set_yscale('log')
# ax1R.tick_params(axis='y',labelsize=18,colors='red')
#
#ax2 = fig.add_subplot(212)
#ax2.set_ylim(mc,2.5)
#ax2.set_ylabel('Max Rate (m3/min)')
#ax2.set_ylim(11.9,12.1)
#ax2.set_xlim(pd.to_datetime('2019-03-01'),pd.to_datetime('2019-03-30'))
#

#--- plot
ax1.scatter(df_triggered['date'],df_triggered['magnitude'],
            s=2**(df_triggered['magnitude']),
            alpha=0.05,color='blue', label = 'Triggered')
#
ax2.scatter(df_background['date'],df_background['magnitude'],
            s=2**(df_background['magnitude']),
            alpha=0.05,color='red', label = 'Declustered')
#
ax3.scatter(df_lohi['date'],df_lohi['magnitude'],
            s=2**(df_lohi['magnitude']),
            alpha=0.05,color='black', label = 'Total')
fig.legend(loc='best',frameon=True,labelspacing=0)
#ax1R.plot(pd.to_datetime(bin_edges[:-1]),hist,'r-',linewidth=1)

#DrawFrame(ax1, (0.11,0.07),(0.32,0.1),0.01)
ax3.xaxis.set_major_formatter(dates.DateFormatter('%b %y'))
fig.autofmt_xdate()

#fig.savefig('%s/timeSeries_ok.png'%DIR_OUTPT_figs,dpi=150,bbox_inches='tight')

plt.show()
In [54]:
#--- plot spatial map 
#--- fault map california: temporal evolution of events


#--- set min/max time to avoid temporal incompletenesss issue
df_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
df_lohi = GetCartesian( df_lohi )
df_lohi.reset_index(inplace=True,drop=True)
# df_triggered = GetCartesian( df_triggered )
# df_background = GetCartesian( df_background )


xlo = df_lohi['x(km)'].min()
xhi = df_lohi['x(km)'].max()
ylo = df_lohi['y(km)'].min()
yhi = df_lohi['y(km)'].max()
L = max(xhi-xlo,yhi-ylo)
exp = 2

df_triggered.plot.scatter('x(km)','y(km)',
                        s=exp**(df_triggered['magnitude']),
                        color = 'blue', label = 'triggered',xlim=(0,L),ylim=(0,L),
#                        c='date',cmap='jet',
                        alpha=0.01,
                        figsize=(4,4)) #--- plot

df_background.plot.scatter('x(km)','y(km)',
                        s=exp**(df_background['magnitude']),
                        color = 'red', label = 'background',xlim=(0,L),ylim=(0,L),
#                        c='date',cmap='jet',
                        alpha=0.01,
                        figsize=(4,4)) #--- plot

df_lohi.plot.scatter('x(km)','y(km)',
                        s=exp**(df_lohi['magnitude']),
                        color = 'black', label = 'Total',xlim=(0,L),ylim=(0,L),
#                        c='date',cmap='jet',
                        alpha=0.01,
                        figsize=(4,4)) #--- plot







plt.show()
#plt.savefig('%s/map.png'%DIR_OUTPT_figs,dpi=150,bbox_inches='tight')
In [55]:
#trig_ratio *= 0.75
#n_thresh_new = edges_actual[:-1][cdf_actual <= trig_ratio][-1]
#new_quantile = cdf[edges[:-1]<=n_thresh_new][-1]
#print 'new_quantile=%s'%new_quantile
#n_thresh = n_thresh_new
In [56]:
#--- plot clusters


def Inside(t,(tmin,tmax)):
    if tmin<= t<tmax:
        return True
    return False    

def GetTrigList( nij_trig ):
    d_trig = {}
    for items in nij_trig.itertuples():
        triggerID = items.Parent_id
        d_trig.setdefault( triggerID, [] ).append( items.Event_id )
    return d_trig

def PlotArrowsTime(df_complete,d_trig, (tmin,tmax), (xmin,xmax), (ymin,ymax) ):
    tlist=[]
    mlist=[]
    for triggerID in d_trig: #--- mother events
        
        t0 = df_complete['date'].iloc[ triggerID ] #--- time of mother event
        m0 = df_complete['magnitude'].iloc[triggerID] #--- magnitude
        
        if not ( Inside(t0,(tmin,tmax) ) and #--- within the given interval
                 Inside(swarm_lohi['x(km)'].iloc[triggerID],(xmin,xmax)) and 
                 Inside(swarm_lohi['y(km)'].iloc[triggerID],(ymin,ymax)) ):
            continue

#        x0 = md.date2num( t0 ) #--- convert to time object
#        y0 = m0 #--- magnitude
        
        tlist.append( t0 )
        mlist.append( m0 )

        for daughter_id in d_trig[triggerID]: #--- children
            
            t1 = df_complete['date'].iloc[ daughter_id ] #--- time
            m1 = df_complete['magnitude'].iloc[ daughter_id ] #--- magnitude
            
            tlist.append( t1 )
            mlist.append( m1 )
            
            if not ( Inside(t1,(tmin,tmax) ) and #--- within the given interval
                     Inside(swarm_lohi['x(km)'].iloc[daughter_id],(xmin,xmax)) and 
                     Inside(swarm_lohi['y(km)'].iloc[daughter_id],(ymin,ymax)) ):
                continue
                
#            x1 = md.date2num( t1 ) #--- convert to time object
#            y1 = m1 #--- magnitude

#            xw = x1 - x0 #--- t_child-t_parent
#            yw = y1 - y0 #--- m_child - m_parent
            plt.annotate("", (t1,m1), xytext=(t0, m0),
                         textcoords='data',
                        arrowprops=dict(arrowstyle="-|>,head_width=.4,head_length=.8",color="b",linewidth="0.3")) 
    
    #--- plot circles
    df=pd.DataFrame({'date':tlist,'mag':mlist})
    plt.scatter(df['date'], df['mag'],
                s=4**(df['mag']),
                alpha=1,
                facecolors='red',color='black')
    #plt.savefig('timeSeries.png')
    
#--------------------------------------
#----- key: event value: aftershock id
#--------------------------------------
d_trig = GetTrigList( nij_trig )

#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)

#--------------------
#--- cartesian coords
#--------------------
swarm_lohi = GetCartesian( swarm_lohi )



# #--- plot arrows
# #dt=30
# for ii in xrange(1):
#     #--- setup
#     fig = plt.figure(figsize=(16,4),frameon=False) #, dpi=300)
#     #--- xlimit
#     xmin = swarm_lohi['longitude'].min()
#     xmax = swarm_lohi['longitude'].max()
#     ymin = swarm_lohi['latitude'].min()
#     ymax = swarm_lohi['latitude'].max()
#     plt.tick_params(axis='x', labelsize=14)
#     plt.tick_params(axis='y', labelsize=14)
    
#     #--- plot all events
#     plt.ylim(mc,swarm_lohi['magnitude'].max())
#     plt.scatter(swarm_lohi['date'],swarm_lohi['magnitude'],
#                 s=2*np.exp(swarm_lohi['magnitude']),
#             alpha=0.1)

#     key_max = swarm_lohi.loc[nij_trig.groupby(by='Parent_id').groups.keys()]['magnitude'].sort_values(ascending=False).index[ii]
# #    print 'key=',key_max
# #    print 't=',swarm_lohi.loc[key_max]['date']
# #    print 'x=',swarm_lohi.loc[key_max]['x(km)'],
# #    print 'y=',swarm_lohi.loc[key_max]['y(km)'],
# #    print 'z=',swarm_lohi.loc[key_max]['z(km)']
#     dt = nij_trig[nij_trig['Parent_id']==key_max]['T'].max()
#     dx = nij_trig[nij_trig['Parent_id']==key_max]['R'].max()
#     tt=swarm_lohi['date'].loc[key_max]
#     xx = swarm_lohi.loc[key_max][['x(km)','y(km)','z(km)']]
#     PlotArrowsTime( swarm_lohi, d_trig,
#                        ( tt+datetime.timedelta(days=-dt),tt+datetime.timedelta(days=+dt)), 
#            (xx['x(km)']-dx,xx['x(km)']+dx),
#            (xx['y(km)']-dx,xx['y(km)']+dx) )
#     plt.xlim(tt+datetime.timedelta(days=-dt),tt+datetime.timedelta(days=+dt))
In [57]:
# #--- plot clusters


# def Inside(t,(tmin,tmax)):
#     if tmin<= t<tmax:
#         return True
#     return False    

# def GetTrigList( nij_trig ):
#     d_trig = {}
#     for items in nij_trig.itertuples():
#         triggerID = items.Parent_id
#         d_trig.setdefault( triggerID, [] ).append( items.Event_id )
#     return d_trig

    
# def PlotArrowsSpace(df_complete,d_trig, (tmin,tmax), (xmin,xmax), (ymin,ymax) ):
#     xlist=[]
#     ylist=[]
#     mlist=[]
#     for triggerID in d_trig: #--- mother events
        
#         t0 = df_complete['date'].iloc[ triggerID ] #--- time of mother event
#         ( x0, y0 ) = ( swarm_lohi['longitude'].iloc[triggerID], 
#                        swarm_lohi['latitude'].iloc[triggerID] )
#         m0 = df_complete['magnitude'].iloc[triggerID] #--- magnitude
        
# #        if not ( Inside(t0,(tmin,tmax) ) and #--- within the given interval
#  #                Inside(x0,(xmin,xmax)) and 
#   #               Inside(y0,(ymin,ymax)) ):
#    #         continue

#         for daughter_id in d_trig[triggerID]: #--- children
#             t1 = df_complete['date'].iloc[ daughter_id ] #--- time
#             ( x1, y1 ) = ( swarm_lohi['longitude'].iloc[ daughter_id ], 
#                            swarm_lohi['latitude'].iloc[ daughter_id ] )
#             m1 = df_complete['magnitude'].iloc[ daughter_id ] #--- magnitude
            
#             xlist.append( x1 )
#             ylist.append( y1 )
#             mlist.append( m1 )
            
# #            if not ( Inside(t1,(tmin,tmax) ) and #--- within the given interval
# #                     Inside(x1,(xmin,xmax)) and 
# #                     Inside(y1,(ymin,ymax)) ):
# #                continue

#             ax.annotate("", (x1,y1), xytext=(x0, y0),
#                          textcoords='data',
#                         arrowprops=dict(arrowstyle="-|>,head_width=.2,head_length=.8",color="b",linewidth="0.3")) 
 
#         #--- plot mother
#         plt.scatter(x0,y0,
#                     s=exponent**m0,
#                     marker='*',
#                     facecolors='yellow',color='black',
#                     alpha=1.0)

#     #--- plot circles
#     df=pd.DataFrame({'x(km)':xlist,'y(km)':ylist,'m':mlist})
#     ax.scatter(df['x(km)'], df['y(km)'],
#                 s=exponent**(df['m']),
#                 alpha=1,
#                 facecolors='red',color='black')
    
# #--------------------------------------
# #----- key: event value: aftershock id
# #--------------------------------------
# d_trig = GetTrigList( nij_trig )

# #--------------------
# #----- subset (the same as trig. analysis)
# #--------------------
# swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
#                              'magnitude', 
#                              ( mc, sys.maxint ) ) 
# swarm_lohi.reset_index(inplace=True,drop=True)

# #--------------------
# #--- cartesian coords
# #--------------------
# swarm_lohi = GetCartesian( swarm_lohi )



# #--- plot arrows
# exponent = 4
# for ii in xrange(1):
#     #--- setup
#     fig = plt.figure(figsize=(8,8)) #,frameon=False) #, dpi=300)
#     ax = fig.add_subplot(111)
#     #--- xlimit
#     xmin = swarm_lohi['longitude'].min()
#     xmax = swarm_lohi['longitude'].max()
#     ymin = swarm_lohi['latitude'].min()
#     ymax = swarm_lohi['latitude'].max()
#     ax.tick_params(axis='x', labelsize=14)
#     ax.tick_params(axis='y', labelsize=14)
    
#     #--- plot all events
#     ax.scatter(swarm_lohi['longitude'], swarm_lohi['latitude'], # 'x(km)'],swarm_lohi['y(km)'],
#                 s=exponent**swarm_lohi['magnitude'],
#                 alpha=0.1)

# #    key_max = swarm_lohi.loc[nij_trig.groupby(by='Parent_id').groups.keys()]['magnitude'].sort_values(ascending=False).index[ii]
#     key_max = nij_trig.groupby(by='Parent_id').count()['R'].sort_values(ascending=False).index[ii]
#     dt = nij_trig[nij_trig['Parent_id']==key_max]['T'].max()
#     dx = 0.1 #nij_trig[nij_trig['Parent_id']==key_max]['R'].max()
#     tt = swarm_lohi['date'].loc[key_max]
#     xx = swarm_lohi.loc[ key_max ][['longitude','latitude']] #[['x(km)','y(km)','z(km)']]
    
#     PlotArrowsSpace( swarm_lohi, d_trig,
#                        ( tt+datetime.timedelta(days=-dt),tt+datetime.timedelta(days=+dt)), 
#            (xx['longitude']-dx,xx['longitude']+dx),
#            (xx['latitude']-dx,xx['latitude']+dx) )
#     ax.set_xlim(xx[0]-dx,xx[0]+dx)
#     ax.set_ylim(xx[1]-dx,xx[1]+dx)
In [58]:
# #--- spatial clusters



# #def GetCartesian2nd( dff ):
# #    df = dff.copy()
# #    xlo = df['longitude'].min()
# #    xhi = df['longitude'].max()
# #    ylo = df['latitude'].min()
# #    yhi = df['latitude'].max()
# #    getDistX = lambda x: geopy.distance.vincenty( ( 0.0, xlo ), ( 0.0, x ) ).km
# #    getDistY = lambda y: geopy.distance.vincenty( ( ylo, 0.0 ), ( y, 0.0 ) ).km
# #    df[ 'x' ] = df[ 'longitude' ].apply( getDistX )
# #    df[ 'y' ] = df[ 'latitude' ].apply( getDistY )
# #    return df


# def PlotMap(df_complete,TRIG_ID, (tmin,tmax), (xmin,xmax), (ymin,ymax) ):
#     #--- plot 
#     counter = 0
#     exponent=4
#     for triggerID,colin in zip(TRIG_ID,colors):
#         t0 = df_complete['date'].iloc[ triggerID ]
# #        if not ( Inside(t0,(tmin,tmax) ) and 
# #                 Inside(df_complete['y(m)'].iloc[triggerID],(xmin,xmax)) and 
# #                 Inside(df_complete['z(m)'].iloc[triggerID],(ymin,ymax)) ):  
# #              continue
#         counter += 1
#         if counter > 10:
#             break
#         #--- plot    
#         fig = plt.figure(figsize=(5,5))#,dpi=150)
#         ax = fig.add_subplot(111)
# #        plt.xlim(xmin,xmax)
# #        plt.ylim(ymin,ymax)
#         plt.tick_params(axis='x', labelsize=14)
#         plt.tick_params(axis='y', labelsize=14)
#         plt.scatter(df_complete.loc[triggerID]['x(km)'],
#                     df_complete.loc[triggerID]['y(km)'],
#                     s=exponent**df_complete.loc[triggerID]['magnitude'],
#                     facecolors='white',color='white')
#         d={'x':[],'y':[],'m':[],'t':[]}
#         #--- plot daughters
#         for daughter_id in d_trig[triggerID]:
#             t1 = df_complete['date'].iloc[ daughter_id ]
# #            if not ( Inside(t1,(tmin,tmax) ) and 
# #                     Inside(df_complete['y(m)'].iloc[daughter_id],(xmin,xmax)) and 
# #                     Inside(df_complete['z(m)'].iloc[daughter_id],(ymin,ymax)) ):
# #                continue
#     #--- remove pairs with r > cs.t
# #            if rmat[ triggerID, daughter_id ] > cs * tmat[ triggerID, daughter_id ]:
# #                print 'hello!'
# #                continue

#             d['t'].append( t1 )
#             d['x'].append(df_complete['x(km)'].iloc[daughter_id])
#             d['y'].append(df_complete['y(km)'].iloc[daughter_id])
#             d['m'].append(df_complete['magnitude'].iloc[daughter_id])
#         df=pd.DataFrame(d)
#         plt.scatter(df['x'],df['y'],
#                     s=exponent**df['m'],
#                     c=df['t'], cmap='jet') #,alpha=0.1)
#         plt.colorbar()
#         #--- plot mother
#         plt.scatter(df_complete['x(km)'].iloc[triggerID],df_complete['y(km)'].iloc[triggerID],
#                     s=exponent**df_complete['magnitude'].iloc[triggerID],
#                     marker='*',
#                     facecolors='yellow',color='black',
#                     alpha=1.0)
#         #--- 


    
# colors = itertools.cycle(["r", "b", "g","b"])
# #cs = 3 * (24*3600) #--- km per day

# #--- sort
# new_list = [[len(d_trig[triggerID]),triggerID] for triggerID in d_trig]
# new_list.sort(reverse=True)
# TRIG_ID = [i[1] for i in new_list]

# #--- xlimit
# swarm_lohi=GetCartesian(swarm_lohi)
# #swarm_lohi['y(m)']=swarm_lohi['x']
# #swarm_lohi['z(m)']=swarm_lohi['y']
# xmin = swarm_lohi['x(km)'].min()
# xmax = swarm_lohi['x(km)'].max()
# ymin = swarm_lohi['y(km)'].min()
# ymax = swarm_lohi['y(km)'].max()
# #plt.tick_params(axis='x', labelsize=14)
# #plt.tick_params(axis='y', labelsize=14)

# #--- plot arrows
# PlotMap( swarm_lohi, TRIG_ID,
# #          (pd.to_datetime('2010-04-04'),pd.to_datetime('2010-06-26')),
# #          (pd.to_datetime('2010-04-08'),pd.to_datetime('2010-04-14')),
#                        ( swarm['date'].min(), swarm['date'].max()), 
#            (xmin,xmax),
#            (ymin,ymax) )
In [59]:
#--- cluster life time


def GetPairsWithSpecifiedParentMag((m0,m1),catalog,df):
    df_parents = df.groupby(by='Parent_id').groups #--- parent events
    ds = catalog['magnitude'].loc[df_parents.keys()] #--- mag. of parent events
    ds_m=ds[(m0 <= ds) & (ds < m1)] #--- parent events with m0<m<m1
    df_m=df[pd.DataFrame([df['Parent_id'] ==k for k in ds_m.index]).any()] #--- data frame
    return df_m

#def AddDist( df_trig, df_complete ):
#    x=df_complete.loc[ df_trig['Event_id'] ]['r(m)'] 
#    y=df_complete.loc[ df_trig['Parent_id'] ]['r(m)']
#    df_trig['R'] = pd.Series(numpy.linalg.norm(x-y)))
#    assert len ( df_trig[ df_trig['R'] == 0.0 ] ) == 0, '%s'%display( df_trig[ df_trig['R'] == 0.0 ] )

def AddTime( df_trig, df_complete ):
    prefact = 1.0 / ( 24.0 * 60 * 60 ) #--- daily
    x=df_complete.loc[ df_trig['Event_id'] ]
    y=df_complete.loc[ df_trig['Parent_id'] ]
    x.reset_index(inplace=True)
    y.reset_index(inplace=True)
    df_trig['T'] = x['date']-y['date']
    df_trig['T'] = df_trig['T'].apply(lambda x: x.total_seconds() * prefact)

def RemovePair( df, cs ):
    return df [ df[ 'R' ] <= df[ 'T' ] * cs ]

#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)


#--- choose m0<m<m1
n = 7
#m = list(np.linspace(mc,swarm['magnitude'].max(),n))
m_parents = swarm_lohi.loc[nij_trig.groupby(by='Parent_id').groups.keys()]['magnitude']
m = list(np.linspace(m_parents.min()-1e-6,m_parents.max()+1e-6,n))
#m+=[swarm['magnitude'].max()]

X={}
rho_dict={}
m_range={}
Err={}
fig = plt.figure(figsize=(4,4))
ax = fig.add_subplot(111)
for i in xrange( len(m) - 1 ):
    m0 = m[ i ]
    m1 = m[ i + 1 ]
        
#    print m0, m1
#    nij_trig.groupby(by='Parent_id')
    
    df_trig = GetPairsWithSpecifiedParentMag((m0,m1),swarm_lohi,nij_trig) #--- get parent with m0<m<m1
    df_trig.reset_index( inplace = True, drop = True )
    #--- add distance & time column
#    df_trig['R'] = rmat[df_trig['Parent_id'],df_trig['Event_id']]
#    df_trig['T'] = tmat[df_trig['Parent_id'],df_trig['Event_id']]
#    assert len ( df_trig[ df_trig['R'] == 0.0 ] ) == 0, '%s'%display( df_trig[ df_trig['R'] == 0.0 ] )
#    assert len ( df_trig[ df_trig['T'] == 0.0 ] ) == 0, '%s'%display( df_trig[ df_trig['T'] == 0.0 ] )
    
    #--- remove pairs with r > cs.t
#    cs = 3 * (24*3600) #--- km per day
#    df_trig = RemovePair( df_trig, cs )

    #--- life time
    df_duration = df_trig.groupby(by='Parent_id')['T'].max()

    #--- rho plots
    n_decades = int( math.ceil( log(df_duration.max()/df_duration.min(),10) ) )
    nbin_per_decade = 3
    nbins =  nbin_per_decade * n_decades
    rho, xedges=np.histogram(df_duration, 
                             density=True,
                             bins=np.logspace(log(df_duration.min(),10),
                                              log(df_duration.max(),10),nbins))
    hist, xedges=np.histogram(df_duration,
                              bins=np.logspace(log(df_duration.min(),10),
                                               log(df_duration.max(),10),nbins))
    err = rho/np.sqrt(hist)
        
    
    #--- rho
#    plt.xlim(1e-3,1e2)
#    plt.ylim(1e-3,1e2)
    plt.xscale('log')
    plt.yscale('log')
    plt.xlabel('Duration(Day)')
    plt.ylabel('P(Duration)')
    plt.errorbar(xedges[:-1],rho, yerr=err,label='[%2.1f,%2.1f]'%(m0,m1))
    fontP = FontProperties()
    fontP.set_size('small')
    plt.legend(title='m',prop=fontP,loc='upper right',bbox_to_anchor=(1.5, 1))
    x=xedges[:-1]
    
    X[i] = x[:]
    rho_dict[i]=rho[:]
    m_range[i]=np.array([m0,m1])
    Err[i]=err[:]
#plt.savefig('%s/duration.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight')

KeyErrorTraceback (most recent call last)
<ipython-input-59-e7305cf8dabd> in <module>()
     69 
     70     #--- life time
---> 71     df_duration = df_trig.groupby(by='Parent_id')['T'].max()
     72 
     73     #--- rho plots

/home/kamran.karimi1/.local/lib/python2.7/site-packages/pandas/core/base.pyc in __getitem__(self, key)
    273         else:
    274             if key not in self.obj:
--> 275                 raise KeyError("Column not found: {key}".format(key=key))
    276             return self._gotitem(key, ndim=1)
    277 

KeyError: 'Column not found: T'
In [60]:
#--- histograms: magnitude of mother events

#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)


mlist = swarm_lohi.loc[ nij_trig.groupby(by='Parent_id').groups.keys() ]['magnitude']
#plt.hist(mlist)

list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)
mlist0= swarm_lohi['magnitude'].loc[df_zeros.index]


#--- accumulated histogram
N = len(mlist)+len(mlist0)
slist=mlist.tolist()+mlist0.tolist() #np.array(swarm_copy['magnitude'])
slist.sort()
d = histogramACCUMLTD( slist )
keys=d.keys()
keys.sort()

#--- plot
fig, ax = plt.subplots(figsize=(4,4))
ax.set_yscale('log')
ax.set_xlabel('$m$')
ax.set_ylabel('$N(m)$')
xmin=min(slist)
xmax=max(slist)
ax.axis([xmin,xmax,1,1000])
#ax.set_xticks(np.linspace(1,6,6))

dx=0.1
junk = ax.hist( slist,
                bins=int((xmax-xmin)/dx),
               label='histogram',color='silver') #--- histogram
xx=[];yy=[]
for ikey in keys:
    xx.append(d[ikey][0])
    yy.append(N-d[ikey][2])
ax.plot(xx,yy,
            linestyle='-', drawstyle='steps-post',color='black',
             linewidth=1.0) #--- accumulated
    
b=1.65
ax.plot([xmin,xmax],
        [1e3, 1e3* 10**(-b*(xmax-xmin))],'r-.',linewidth=1)

#DrawFrame(ax, (0.2,0.06),(0.14,0.06),0.1,LOG_Y=True)

#plt.savefig('%s/gr.png'%DIR_OUTPT_figs,dpi=300,bbox_inches='tight',transparent=True)
Out[60]:
[<matplotlib.lines.Line2D at 0x7fcc49ae3390>]
In [61]:
#--- productivity relation

mpl.rcParams.update(mpl.rcParamsDefault)
warnings.filterwarnings('ignore') #--- get rid of warnings

rc('text', usetex=True)
font = {'size'   : 26}
matplotlib.rc('font', **font)

import numpy as np
import math

def GetPairsWithSpecifiedParentMag((m0,m1),catalog,df):
    df_parents = df.groupby(by='Parent_id').groups #--- parent events
    ds = catalog['magnitude'].loc[df_parents.keys()] #--- mag. of parent events
    ds_m=ds[(m0 <= ds) & (ds < m1)] #--- parent events with m0<m<m1
    df_m=df[pd.DataFrame([df['Parent_id'] ==k for k in ds_m.index]).any()] #--- data frame
    return df_m, np.mean( ds_m ), np.std( ds_m )/(len(ds_m))**0.5 #--- exponential: std=mean

#def AddDist( df_trig, df_complete ):
#    x=df_complete.loc[ df_trig['Event_id'] ]['r(m)'] 
#    y=df_complete.loc[ df_trig['Parent_id'] ]['r(m)']
#    df_trig['R'] = pd.Series(numpy.linalg.norm(x-y)))
#    assert len ( df_trig[ df_trig['R'] == 0.0 ] ) == 0, '%s'%display( df_trig[ df_trig['R'] == 0.0 ] )

def AddTime( df_trig, df_complete ):
    prefact = 1.0 / ( 24.0 * 60 * 60 ) #--- daily
    x=df_complete.loc[ df_trig['Event_id'] ]
    y=df_complete.loc[ df_trig['Parent_id'] ]
    x.reset_index(inplace=True)
    y.reset_index(inplace=True)
    df_trig['T'] = x['date']-y['date']
    df_trig['T'] = df_trig['T'].apply(lambda x: x.total_seconds() * prefact)

def RemovePair( dff, cs ):
    df = dff.copy()
    df['R'] = rmat[df['Parent_id'],df['Event_id']]
    df['T'] = tmat[df['Parent_id'],df['Event_id']]
    assert len ( df[ df['R'] == 0.0 ] ) == 0, '%s'%display( df[ df['R'] == 0.0 ] )
    assert len ( df[ df['T'] == 0.0 ] ) == 0, '%s'%display( df[ df['T'] == 0.0 ] )
    return df [ df[ 'R' ] <= df[ 'T' ] * cs ]

def AdjustBins(data,bins):
    hist, xedges=np.histogram(data, #--- histogram
                              bins=bins)
    tmp = [xedges[0]]
    for i,j in zip(xedges[1:],hist): #--- expand bins 
        if j < 2:
            continue
        tmp.append(i)
#    tmp.append(xedges[-1])
    if hist[-1] < 2:
        tmp.pop(-2)
    return tmp 

def GetParentsMagnitudes( swarm_lohi, df_trig ):
    mlist = swarm_lohi.loc[ df_trig.groupby(by='Parent_id').groups.keys() ]['magnitude']

    list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
    df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)
    mlist0= swarm_lohi['magnitude'].loc[df_zeros.index]

    return mlist.tolist()+mlist0.tolist()

#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)



#--- remove pairs with r > cs.t
#cs = 3 * (24*3600) #--- km per day
df_trig = nij_trig.copy()

#--- parents' magnitudes
m_list = GetParentsMagnitudes( swarm_lohi, df_trig )

#--- number of daughters for each mother
df_nn = df_trig.groupby(by='Parent_id').count()['Event_id']


#--- include daughters with zero children
list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)


#--- scattered plot
fig = plt.figure(figsize=(4,4),frameon=True)
ax = fig.add_subplot(111)
#ax.set_xlim(mc,swarm_lohi['magnitude'].max()+0.5)
m_min = min(m_list)-1.0e-6
m_max = max(m_list)+1.0e-6
ax.set_xlim(np.floor(m_min),np.ceil(m_max))
ax.set_ylim(0.2,10**np.ceil(np.log10(df_nn.max())))
ax.set_yscale('log')
ax.plot( swarm_lohi['magnitude'].loc[df_nn.index],
           df_nn,
          'x',color='black',alpha=0.25,zorder=1,label='')
ax.legend(fontsize=0)

ax2=ax.twinx()
ax2.set_yscale('linear')
ax2.set_xlim(np.floor(m_min),np.ceil(m_max))
ax2.set_ylim(-.02,1)
ax2.plot( swarm_lohi['magnitude'].loc[df_zeros.index],
         df_zeros,
          'x',color='black',alpha=0.25)
ax2.tick_params(labelright=False,labelleft=False,left=False, right=False)

#plt.yscale('log')
#plt.xlabel('$m$')
#plt.ylabel(r'$N_{as}$')
ax.tick_params(axis='y',left=True, right=True,which='both')
ax.tick_params(axis='x',bottom=True, top=True,which='both')

#--- choose m0<m<m1
dm = 0.1
n = int( ( m_max - m_min ) / dm )
m = list(np.linspace(m_min,m_max,n))
m=AdjustBins(m_list,m)
Mlist=m

m_mean=np.array(np.zeros(len(m) - 1))
m_err=np.array(np.zeros(len(m) - 1))
N_as = np.array(np.zeros(len(m) - 1))
err_N = np.array(np.zeros(len(m) - 1))
for i in xrange( len(m) - 1 ):
    try:
        m0 = m[ i ]
        m1 = m[ i + 1 ]
        df, m_mean[ i ], m_err[i] = GetPairsWithSpecifiedParentMag((m0,m1),
                                                         swarm_lohi,df_trig) #--- get parent with m0<m<m1
        df.reset_index( inplace = True, drop = True )
        
        #--- number of daughters for each mother
        df_n = df.groupby(by='Parent_id').count()['Event_id']
    
        #--- include daughters with zero children
        list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
        subset = swarm_lohi.loc[list_of_zero_afs]
        list_of_zero_afs = subset[(subset['magnitude'] >= m0) & (subset['magnitude'] <= m1)].index    
        df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)
        
        N_as[ i ] = 1.0*(df_n.sum()+ df_zeros.sum())/(len(df_n)+len(df_zeros))
        err_N[ i ] = (N_as[ i ]/(len(df_n)+len(df_zeros)))**0.5 #--- poisson
#        err_N[ i ]=np.std(df_n.tolist()+df_zeros.tolist())/(len(df_n)+len(df_zeros))**0.5
    except:
        print traceback.print_exc()
        continue
    
#ax.plot(m, np.append(N_as,N_as[-1]), linestyle='-', drawstyle='steps-post',color='red')
ax.errorbar(m_mean,N_as,
            yerr=err_N,xerr=m_err,
            markerfacecolor='white',
            fmt='o', markersize=8,color='red',markeredgewidth=0.7,
            linewidth=.5,
            barsabove=None,capsize=5,capthick=1,elinewidth=1,
           zorder=2)


#--- add major xticks
xmin=np.ceil(ax.axis()[0])
xmax=np.floor(ax.axis()[1])
nbin = xmax - xmin
ax.set_xticks(np.linspace(xmin,xmax,int(nbin)+1))

DrawFrame(ax, (0.24,0.08),(0.14,0.06),0.01,LOG_Y=True)

#from scipy.optimize import curve_fit
def func(x,alpha,c):
    return c*10**(alpha*x)
#popt, pconv = curve_fit(func,m_mean,N_as)

popt = np.polyfit(m_mean,np.log10(N_as), 1)
popt[1] = 10**(popt[1])
alpha_p = popt[0]

mm = swarm_lohi['magnitude'].loc[df_zeros.index].tolist() + \
     swarm_lohi['magnitude'].loc[df_nn.index].tolist() #swarm_lohi[swarm_lohi['magnitude']>=mc]['magnitude']
mm = np.array(mm)
mm=list(set(mm))
mm.sort()
mm=np.array(mm)

ax.plot(mm,func(mm,*popt),'-.g',zorder=3,label='$N\propto 10^{%3.2f m}$'%popt[0])
ax.legend(fontsize='xx-small',frameon=False)
ax.set_title('$q=%d,d_f=%s$'%(quantile*100,Df),fontsize='xx-small')
#print 'alpha=%s'%popt[0]
#print pconv**0.5

plt.savefig('%s/prod.png'%DIR_OUTPT_figs,dpi=75*.75,bbox_inches='tight')

mpl.rcParams.update(mpl.rcParamsDefault)
warnings.filterwarnings('ignore') #--- get rid of warnings

rc('text', usetex=True)
font = {'size'   : 20 }
matplotlib.rc('font', **font)

np.save('%s/alpha'%DIR_OUTPT_figs,alpha_p)


plt.show()
No handlers could be found for logger "matplotlib.legend"
Traceback (most recent call last):
  File "<ipython-input-61-5ac3aa13af5d>", line 136, in <module>
    swarm_lohi,df_trig) #--- get parent with m0<m<m1
  File "<ipython-input-61-5ac3aa13af5d>", line 17, in GetPairsWithSpecifiedParentMag
    df_m=df[pd.DataFrame([df['Parent_id'] ==k for k in ds_m.index]).any()] #--- data frame
  File "/home/kamran.karimi1/.local/lib/python2.7/site-packages/pandas/core/frame.py", line 2918, in __getitem__
    return self._getitem_bool_array(key)
  File "/home/kamran.karimi1/.local/lib/python2.7/site-packages/pandas/core/frame.py", line 2967, in _getitem_bool_array
    key = check_bool_indexer(self.index, key)
  File "/home/kamran.karimi1/.local/lib/python2.7/site-packages/pandas/core/indexing.py", line 2486, in check_bool_indexer
    raise IndexingError('Unalignable boolean Series provided as '
IndexingError: Unalignable boolean Series provided as indexer (index of the boolean Series and of the indexed object do not match
Traceback (most recent call last):
  File "<ipython-input-61-5ac3aa13af5d>", line 136, in <module>
    swarm_lohi,df_trig) #--- get parent with m0<m<m1
  File "<ipython-input-61-5ac3aa13af5d>", line 17, in GetPairsWithSpecifiedParentMag
    df_m=df[pd.DataFrame([df['Parent_id'] ==k for k in ds_m.index]).any()] #--- data frame
  File "/home/kamran.karimi1/.local/lib/python2.7/site-packages/pandas/core/frame.py", line 2918, in __getitem__
    return self._getitem_bool_array(key)
  File "/home/kamran.karimi1/.local/lib/python2.7/site-packages/pandas/core/frame.py", line 2967, in _getitem_bool_array
    key = check_bool_indexer(self.index, key)
  File "/home/kamran.karimi1/.local/lib/python2.7/site-packages/pandas/core/indexing.py", line 2486, in check_bool_indexer
    raise IndexingError('Unalignable boolean Series provided as '
IndexingError: Unalignable boolean Series provided as indexer (index of the boolean Series and of the indexed object do not match
None
None

multiple realizations

In [62]:
from IPython.display import Image

alpha_arr=[]
for irun in xrange(16):
    path = 'runFillmore/Run%s'%irun
    a=np.load('%s/alpha.npy'%path)
    alpha_arr.append(a.item())
    #--- load image
    display(Image(filename='%s/prod.png'%path))
df=pd.DataFrame({'mean':[np.mean(alpha_arr)],'std_dev':np.std(alpha_arr),
                        'std_err':[np.std(alpha_arr)/len(alpha_arr)**.5]},
                          index=[r'$\alpha$'])
display(df)
df['mean']+2*df['std_dev'],df['mean']-2*df['std_dev']                       
                       

IOErrorTraceback (most recent call last)
<ipython-input-62-06e92f3df818> in <module>()
      4 for irun in xrange(16):
      5     path = 'runFillmore/Run%s'%irun
----> 6     a=np.load('%s/alpha.npy'%path)
      7     alpha_arr.append(a.item())
      8     #--- load image

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/lib/npyio.pyc in load(file, mmap_mode, allow_pickle, fix_imports, encoding)
    370     own_fid = False
    371     if isinstance(file, basestring):
--> 372         fid = open(file, "rb")
    373         own_fid = True
    374     elif is_pathlib_path(file):

IOError: [Errno 2] No such file or directory: 'runFillmore/Run0/alpha.npy'
In [63]:
#--- number distributions


#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)



#--- remove pairs with r > cs.t
#cs = 3 * (24*3600) #--- km per day
df_trig = nij_trig.copy()

#--- parents' magnitudes
m_list = GetParentsMagnitudes( swarm_lohi, df_trig )

#--- number of daughters for each mother
df_nn = df_trig.groupby(by='Parent_id').count()['Event_id']


#--- include daughters with zero children
list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)

#--- scattered plot
fig = plt.figure(figsize=(4,4),frameon=True)
ax = fig.add_subplot(111)
ax.plot( swarm_lohi['magnitude'].loc[df_nn.index],
           df_nn,
          'x',color='gray',alpha=1.0)
ax.plot( swarm_lohi['magnitude'].loc[df_zeros.index],
         df_zeros,
          'x',color='gray')
ax.set_yscale('log')


#--- parents' magnitudes
m_list = GetParentsMagnitudes( swarm_lohi, df_trig )

#--- number of daughters for each mother
df_nn = df_trig.groupby(by='Parent_id').count()['Event_id']


#--- include daughters with zero children
list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)


#--- scattered plot
m_min = min(m_list)-1e-6 #0.05 #-1.0e-6
m_max = max(m_list)+1.0e-6 #3.299999333333333 #max(m_list)+0.05 #+1.0e-6 #--- exclude the largest event

#--- choose m0<m<m1
dm = 0.4
n = int( ( m_max - m_min ) / dm )
m = [m_min,m_max] #list(np.linspace(m_min,m_max,n+1))
Mlist=m

m_mean=np.array(np.zeros(len(m) - 1))
m_err=np.array(np.zeros(len(m) - 1))
N_as = np.array(np.zeros(len(m) - 1))
err_N = np.array(np.zeros(len(m) - 1))
for i in xrange(len(m) - 1 ):
    try:
        m0 = m[ i ]
        m1 = m[ i + 1 ]
        df, m_mean[ i ], m_err[i] = GetPairsWithSpecifiedParentMag((m0,m1),
                                                         swarm_lohi,df_trig) #--- get parent with m0<m<m1
        df.reset_index( inplace = True, drop = True )
        
        #--- number of daughters for each mother
        df_n = df.groupby(by='Parent_id').count()['Event_id']
    
        #--- include daughters with zero children
        list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
        subset = swarm_lohi.loc[list_of_zero_afs]
        list_of_zero_afs = subset[(subset['magnitude'] >= m0) & (subset['magnitude'] <= m1)].index    
        df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)
        
#        fig.hist(df_n)
#        if m1 > 3.3:
#            continue
        fig = plt.figure(figsize=(4,4),frameon=True)
        ax = fig.add_subplot(111)
        junk = ax.hist( df_n.tolist()+df_zeros.tolist(),
                bins=1024,#int((xmax-xmin)/dx),
               label='%3.2f-%3.2f'%(m0,m1),color='silver') #--- histogram
        print '#=',junk[0][junk[0]!=0]
        print 'N=',junk[1][:-1][junk[0]!=0]
        ax.set_xlabel('$N_{as}$')
        ax.set_ylabel('number')
#        ax.set_ylim(-100,ax.axis[3])
        fig.legend()
        #--- mean
#        print [m0,m1],list_of_zero_afs
#        pdb.set_trace()
        print 'n_ms=',len(df_n)#+len(df_zeros)
        print 'n_ms(0)=',len(df_zeros)
        N_as[ i ] = 1.0*(df_n.sum()+ df_zeros.sum())/(len(df_n)+len(df_zeros))
        err_N[ i ] = (N_as[ i ]/(len(df_n)+len(df_zeros)))**0.5 #--- poisson
#        err_N[ i ]=np.std(df_n.tolist()+df_zeros.tolist())/(len(df_n)+len(df_zeros))**0.5
    except:
        continue
    
#= [713. 240.  44.  14.   8.   4.   4.   1.   2.   2.   1.   1.   1.   1.
   1.]
N= [  0.           0.97265625   1.9453125    2.91796875   3.890625
   4.86328125   5.8359375    7.78125      8.99707031   9.96972656
  10.94238281  11.91503906  20.91210938  40.8515625  248.75683594]
n_ms= 324
n_ms(0)= 713
In [64]:
#--- choose bins for rho and lambda

import traceback

#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)



#--- remove pairs with r > cs.t
#cs = 3 * (24*3600) #--- km per day
df_trig = nij_trig.copy()

#--- parents' magnitudes
m_list = GetParentsMagnitudes( swarm_lohi, df_trig )

#--- number of daughters for each mother
df_nn = df_trig.groupby(by='Parent_id').count()['Event_id']


#--- include daughters with zero children
list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)


#--- scattered plot
fig = plt.figure(figsize=(4,4),frameon=True)
ax = fig.add_subplot(111)
#ax.set_xlim(mc,swarm_lohi['magnitude'].max()+0.5)
m_min = min(m_list)-1.0e-6
m_max = max(m_list)+1.0e-6 #--- exclude the largest event
ax.set_xlim(m_min,np.ceil(m_max))
ax.set_ylim(0.1,1000)
ax.plot( swarm_lohi['magnitude'].loc[df_nn.index],
           df_nn,
          'o',color='black',alpha=.1)
ax.plot( swarm_lohi['magnitude'].loc[df_zeros.index],
         df_zeros,
          'x',color='gray')

plt.yscale('log')
#plt.xlabel('$m$')
#plt.ylabel(r'$N_{as}$')

#--- choose m0<m<m1
dm = 0.75 #0.5
n = int( (6.0 - m_min ) / dm )
m = list(np.linspace(m_min,6.0,n))
#m=AdjustBins(m_list,m)
# m=[2.7999989999999997,
#  3.133332555555555,
#  3.466666111111111,
#  3.7999996666666664,
#  4.133333222222222,
#  4.466666777777778,
#  4.800000333333333,
#  5.800001]
m+=[6.41,7.1-0.01,7.1+0.01]
Mlist=m

m_mean=np.array(np.zeros(len(m) - 1))
m_err=np.array(np.zeros(len(m) - 1))
N_as = np.array(np.zeros(len(m) - 1))
err_N = np.array(np.zeros(len(m) - 1))
for i in xrange( len(m) - 1 ):
    try:
        m0 = m[ i ]
        m1 = m[ i + 1 ]
        df, m_mean[ i ], m_err[i] = GetPairsWithSpecifiedParentMag((m0,m1),
                                                         swarm_lohi,df_trig) #--- get parent with m0<m<m1
        df.reset_index( inplace = True, drop = True )
#        pdb.set_trace()
        #--- number of daughters for each mother
        df_n = df.groupby(by='Parent_id').count()['Event_id']
    
        #--- include daughters with zero children
        list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
        subset = swarm_lohi.loc[list_of_zero_afs]
        list_of_zero_afs = subset[(subset['magnitude'] >= m0) & (subset['magnitude'] <= m1)].index    
        df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)
        #--- mean
#        print [m0,m1],list_of_zero_afs
#        pdb.set_trace()
        
        N_as[ i ] = 1.0*(df_n.sum()+ df_zeros.sum())/(len(df_n)+len(df_zeros))
        err_N[ i ] = (N_as[ i ]/(len(df_n)+len(df_zeros)))**0.5 #--- poisson
#        err_N[ i ]=np.std(df_n.tolist()+df_zeros.tolist())/(len(df_n)+len(df_zeros))**0.5
    except:
#        traceback.print_exc()
        continue
    
#ax.plot(m, np.append(N_as,N_as[-1]), linestyle='-', drawstyle='steps-post',color='red')
ax.errorbar(m_mean,N_as,
            yerr=err_N,xerr=m_err,
            markerfacecolor='white',
            fmt='o', markersize=8,color='red',markeredgewidth=0.7,
            linewidth=.5,
            barsabove=None,capsize=5,capthick=1,elinewidth=1)


#--- add major xticks
xmin=np.ceil(ax.axis()[0])
xmax=np.floor(ax.axis()[1])
nbin = xmax - xmin
ax.set_xticks(np.linspace(xmin,xmax,int(nbin)+1))

DrawFrame(ax, (0.15,0.08),(0.14,0.06),0.1,LOG_Y=True)

try:
    from scipy.optimize import curve_fit
    def func(x,c,alpha):
        return c*10**(alpha*x)
    popt, pconv = curve_fit(func,m_mean[m_mean>=mc],N_as[m_mean>=mc])
    
    mm = swarm_lohi['magnitude'].loc[df_zeros.index].tolist() + \
         swarm_lohi['magnitude'].loc[df_nn.index].tolist() #swarm_lohi[swarm_lohi['magnitude']>=mc]['magnitude']
    mm = np.array(mm)
    mm=list(set(mm))
    mm.sort()
    mm=np.array(mm)
    ax.plot(mm,func(mm,*popt),'-.g')
    print popt
    print pconv**0.5
except:
    pass

#plt.savefig('%s/prod.png'%DIR_OUTPT_figs,dpi=150,bbox_inches='tight')


fig = plt.figure(figsize=(4,4),frameon=True)
ax = fig.add_subplot(111)
ax.plot(m, np.append(N_as,N_as[-1]), linestyle='-', drawstyle='steps-post',color='red')
ax.errorbar(m_mean,N_as,
            yerr=err_N,xerr=m_err,
            markerfacecolor='white',
            fmt='o', markersize=8,color='red',markeredgewidth=0.7,
            linewidth=.5,
            barsabove=None,capsize=5,capthick=1,elinewidth=1)
ax.set_xlim(m_min,np.ceil(m_max))
ax.set_ylim(0.1,1000)
ax.set_yscale('log')
ax.set_xticks(np.linspace(xmin,xmax,int(nbin)+1))
plt.show()
[0.21956281 0.17631794]
[[0.11981937        nan]
 [       nan 0.04908487]]
In [65]:
m
Out[65]:
[1.999999,
 2.99999925,
 3.9999995000000004,
 4.99999975,
 6.0,
 6.41,
 7.09,
 7.109999999999999]
In [66]:
#--- density plots: rho(r)
#--- split based on the mainshock's magnitude

import numpy as np
import math

#def AdjustBins(data,bins):
#    hist, xedges=np.histogram(data, #--- histogram
#                              bins=bins)
#    tmp = [xedges[0]]
#    for i,j in zip(xedges[1:],hist): #--- expand bins 
#        if j < 2:
#            continue
#        tmp.append(i)
#    tmp.append(xedges[-1])
#    if hist[-1] < 2:
#        tmp.pop(-2)
#    return tmp 

#def GetPairsWithSpecifiedParentMag((m0,m1),catalog,df):
#    df_parents = df.groupby(by='Parent_id').groups #--- parent events
#    ds = catalog['magnitude'].loc[df_parents.keys()] #--- mag. of parent events
#    ds_m=ds[(m0 <= ds) & (ds < m1)] #--- parent events with m0<m<m1
#    df_m=df[pd.DataFrame([df['Parent_id'] ==k for k in ds_m.index]).any()] #--- data frame
#    return df_m, np.mean( ds_m )

#def AddDist( df_trig, df_complete ):
#    x=df_complete.loc[ df_trig['Event_id'] ]['r(m)'] 
#    y=df_complete.loc[ df_trig['Parent_id'] ]['r(m)']
#    df_trig['R'] = pd.Series(numpy.linalg.norm(x-y)))
#    assert len ( df_trig[ df_trig['R'] == 0.0 ] ) == 0, '%s'%display( df_trig[ df_trig['R'] == 0.0 ] )

def AddTime( df_trig, df_complete ):
    prefact = 1.0 / ( 24.0 * 60 * 60 ) #--- daily
    x=df_complete.loc[ df_trig['Event_id'] ]
    y=df_complete.loc[ df_trig['Parent_id'] ]
    x.reset_index(inplace=True)
    y.reset_index(inplace=True)
    df_trig['T'] = x['date']-y['date']
    df_trig['T'] = df_trig['T'].apply(lambda x: x.total_seconds() * prefact)

def RemovePair( df, cs ):
    return df [ df[ 'R' ] <= df[ 'T' ] * cs ]

#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)

##--- parents' magnitudes
#df_trig = nij_trig.copy()
#m_list = GetParentsMagnitudes( swarm_lohi, df_trig )
#m_min = min(m_list)-1.0e-6
#m_max = max(m_list)+1.0e-6 #--- exclude the largest event
#
##--- choose m0<m<m1
#dm = 0.3
#n = int( ( m_max - m_min ) / dm )
#m = list(np.linspace(m_min,m_max,n))
#m=AdjustBins(m_list,m)
Mlist=m

#--- choose m0<m<m1
X={}
rho_dict={}
m_range={}
Err={}
m_mean={}
for i in xrange( len(m) - 1 ):
    try:
        m0 = m[ i ]
        m1 = m[ i + 1 ]
        
        df_trig, m_mean[ i ],junk = GetPairsWithSpecifiedParentMag((m0,m1),swarm_lohi,nij_trig) #--- get parent with m0<m<m1
        df_trig.reset_index( inplace = True, drop = True )
        
        #--- add distance & time column
    #    AddDist( df_trig, df_complete )
    #    df_trig['R'] = rmat[df_trig['Parent_id'],df_trig['Event_id']]
    #    df_trig['T'] = tmat[df_trig['Parent_id'],df_trig['Event_id']]
    #    assert len ( df_trig[ df_trig['R'] == 0.0 ] ) == 0, '%s'%display( df_trig[ df_trig['R'] == 0.0 ] )
    #    AddTime( df_trig, df_complete )
        
        #--- remove pairs with r > cs.t
    #    cs = 3 * (24*3600) #--- km per day
    #    df_trig = RemovePair( df_trig, cs )
    
        #--- rho plots
        n_decades = int( math.ceil( log(df_trig['R'].max()/df_trig['R'].min(),10) ) )
        nbin_per_decade = 4
        nbins =  nbin_per_decade * n_decades
        
        bins = np.logspace(log(df_trig['R'].min(),10)-1.e-10,
                                                  log(df_trig['R'].max(),10)+1.e-10,nbins)
        bins = AdjustBins(df_trig['R'],bins)
    
        rho, xedges=np.histogram(df_trig['R'], 
                                 density=True,
                                 bins=bins)
        hist, xedges=np.histogram(df_trig['R'],
                                  bins=bins)
        x_bins, junk=np.histogram(df_trig['R'], #--- histogram
                                  bins=bins,
                                 weights = df_trig['R'])
    
        #--- rm empty bins
        x_bins = x_bins[ hist != 0 ]
        rho = rho[ hist != 0 ]
        hist = hist[ hist != 0 ]
        
        #--- poisson err 
        err = rho/np.sqrt(hist)
        x_bins /= hist
                
        #--- scattered plot
        fig=plt.figure(figsize=(8,4))
        ax=fig.add_subplot(1,2,1)
        ax.set_title('$%2.1f<m<%2.1f$'%(m0,m1))
        ax.set_xscale('log')
        ax.set_yscale('log')
        ax.set_xlabel('$t$')
        ax.set_ylabel('$r$')
        ax.set_xlim(1e-5,10)
        ax.set_ylim(1e-4,1e2)
        ax.scatter(df_trig['T'],df_trig['R'],alpha=0.1)
        
        #--- discretization effects
#         dt_min=df_trig['T'].min()
#         dt_max=df_trig['T'].max()
#         dr_min=df_trig['R'].min()
#         dr_max=df_trig['R'].max()
        
#         ax.plot([dt_min,dt_min],[dr_min,dr_max],
#                  '-',color='blue')
#         ax.plot([dt_max,dt_max],[dr_min,dr_max],
#                  '-',color='blue')
#         ax.plot([dt_min,dt_max],[dr_min,dr_min],
#                  '-',color='blue')
#         ax.plot([dt_min,dt_max],[dr_max,dr_max],
#                  '-',color='blue')
        
#         ax.set_ylim(dr_min/2,dr_max*2)
#         ax.plot([dt_min,dt_max],[dt_min*cs,dt_max*cs],
#                  '-',color='red')
        
        #--- rho
        ax=fig.add_subplot(1,2,2)
        ax.set_xlim(1e-4,1e2)
        ax.set_ylim(1e-5,1e1)
        ax.set_xscale('log')
        ax.set_yscale('log')
        ax.set_xlabel('$r$')
        ax.set_ylabel('$P(r)$')
        ax.errorbar(x_bins,rho, yerr=err)
            
        X[i] = x_bins[:]
        rho_dict[i]=rho[:]
        m_range[i]=np.array([m0,m1])
        Err[i]=err[:]
#        plt.savefig('%s/rho_r.%s.png'%(DIR_OUTPT_figs,i),dpi=50,bbox_inches='tight')
    except:
        continue

plt.show()

rupture length

In [67]:
#--- density plots: rho(r)
#--- split based on the mainshock's magnitude

import numpy as np
import math


#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)


XX={}
rho_dictt={}
Errr={}
m_meann={}
r_rupp = {}

groups_dic=nij_trig.groupby(by='Parent_id').groups
for parent_id in groups_dic: 
#    print parent_id
    df_trig = nij_trig.loc[groups_dic[parent_id]]
    df_trig.reset_index( inplace = True, drop = True )
#    display(df_trig )
    if len(df_trig) < 2:
        continue
    #--- rho plots
    n_decades = int( math.ceil( log(df_trig['R'].max()/df_trig['R'].min(),10) ) )
    nbin_per_decade = 4
    nbins =  nbin_per_decade * n_decades
    
    bins = np.logspace(log(df_trig['R'].min(),10)-1.e-10,
                                              log(df_trig['R'].max(),10)+1.e-10,nbins)
#    bins = AdjustBins(df_trig['R'],bins)

    rho, xedges=np.histogram(df_trig['R'], 
                             density=True,
                             bins=bins)
    hist, xedges=np.histogram(df_trig['R'],
                              bins=bins)
    x_bins, junk=np.histogram(df_trig['R'], #--- histogram
                              bins=bins,
                             weights = df_trig['R'])

    #--- rm empty bins
    x_bins = x_bins[ hist != 0 ]
    rho = rho[ hist != 0 ]
    hist = hist[ hist != 0 ]
    
    #--- poisson err 
    err = rho/np.sqrt(hist)
    x_bins /= hist
            

    i = parent_id    
    XX[i] = x_bins[:]
    rho_dictt[i]=rho[:]
    m_meann[i]=swarm_lohi['magnitude'].loc[i]
    Errr[i]=err[:]
    r_rupp[i]=np.std(df_trig['R'])

m_meann=pd.Series(m_meann)
r_rupp=pd.Series(r_rupp)

#--- fit
def func(x,alpha,c):
    return c*10**(alpha*x)

popt = np.polyfit(m_meann,np.log10(r_rupp), 1)
popt[1] = 10**(popt[1])



#--- plot
fig = plt.figure(figsize=(4,4))
ax=fig.add_subplot(111)
#ax.set_xscale('log')
ax.set_yscale('log')
ax.set_xlabel('$m$')
ax.set_ylabel('$r_{rup}$[km]')
#ax.set_xlim(2,6)
#ax.set_ylim(1e-4,1e2)
ax.scatter(m_meann,r_rupp,alpha=0.2)


ax.plot(m_meann,func(m_meann,*popt),'-.r',zorder=3,
                     label='$r_{rup}\propto 10^{%3.2f m},r_c=%3.2e$'%(popt[0],popt[1]))
ax.legend(fontsize='xx-small',frameon=False)
#ax.set_title('q=%d%%,df=%s'%(quantile*100,Df),fontsize='xx-small')
#print 'alpha=%s'%popt[0]
sigma=popt[0]
rc=popt[1] #10**np.ceil(np.log10(popt[1]))*2
####plt.savefig('%s/rupture_mag.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight',transparent=True)
plt.show()

KeyErrorTraceback (most recent call last)
<ipython-input-67-6fb89337bb89> in <module>()
     30         continue
     31     #--- rho plots
---> 32     n_decades = int( math.ceil( log(df_trig['R'].max()/df_trig['R'].min(),10) ) )
     33     nbin_per_decade = 4
     34     nbins =  nbin_per_decade * n_decades

/home/kamran.karimi1/.local/lib/python2.7/site-packages/pandas/core/frame.pyc in __getitem__(self, key)
   2925             if self.columns.nlevels > 1:
   2926                 return self._getitem_multilevel(key)
-> 2927             indexer = self.columns.get_loc(key)
   2928             if is_integer(indexer):
   2929                 indexer = [indexer]

/home/kamran.karimi1/.local/lib/python2.7/site-packages/pandas/core/indexes/base.pyc in get_loc(self, key, method, tolerance)
   2657                 return self._engine.get_loc(key)
   2658             except KeyError:
-> 2659                 return self._engine.get_loc(self._maybe_cast_indexer(key))
   2660         indexer = self.get_indexer([key], method=method, tolerance=tolerance)
   2661         if indexer.ndim > 1 or indexer.size > 1:

pandas/_libs/index.pyx in pandas._libs.index.IndexEngine.get_loc()

pandas/_libs/index.pyx in pandas._libs.index.IndexEngine.get_loc()

pandas/_libs/hashtable_class_helper.pxi in pandas._libs.hashtable.PyObjectHashTable.get_item()

pandas/_libs/hashtable_class_helper.pxi in pandas._libs.hashtable.PyObjectHashTable.get_item()

KeyError: 'R'

$r_{rup}$ by fitting

In [68]:
from scipy.optimize import curve_fit

#def func(x,pref, xmean,sigma):
#    return pref * np.exp(-((x-xmean)/sigma)**2)

def func(x,a,b1,b2,rc):
    return a*x**b1/(1.0+(x/rc)**b2)

#def func2(x,c,alpha):
#    return c*10**(alpha*x)

#--- fit
def func2(x,alpha,c):
    return c*10**(alpha*x)

colors = ['black','red','green','blue','cyan','brown','violet','magenta','orange','yellow']


fig=plt.figure(figsize=(8,4))
ax=fig.add_subplot(121)
#ax.axis([2e-3,2e-3*1e1/1e-3,1e-3,1e1])
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_xlabel('$r$')
ax.set_ylabel('$P(r)$')


r_peak=[]
mm_mean=[]
p0 = (1.0,Df-1,2.0,rc)
xmin = ymin = sys.maxint
ymax=xmax=-sys.maxint
for key, col in zip(X,colors):
    try:
#        if key > 6:
#            continue
#        print np.mean(X[key])

        if np.max(X[key]) > xmax:
            xmax = np.max(X[key])
        if np.min(rho_dict[key]) < ymin:
            ymin = np.min(rho_dict[key])
        if np.max(rho_dict[key]) > ymax:
            ymax = np.max(rho_dict[key])
        
        
        ax.errorbar(X[key],rho_dict[key], 
                     yerr=Err[key],
                    linestyle='None', color=col,
                     fmt='-o', markersize=8
                    )            
        popt, pcov = curve_fit(func, X[key], rho_dict[key],
                               sigma=Err[key], 
#                               bounds=((0.0,0.0,0.0), (np.inf,np.inf,np.inf)),
                               p0=p0
                              )
        mm_mean+=[m_mean[key]]
        p0 = popt[:]
        (a,b1,b2,rc)=popt
#        print b2-b1
        r_peak += [rc]
        ax.plot(X[key], func(X[key], *popt),'-',color=col)#,label='fit: r=%3.2f' % (popt[1]))
        ax.legend()
    except:
        traceback.print_exc()
#        print key
        continue

(ylo,yhi)=(10**np.floor(np.log10(ymin)),\
           10**np.ceil(np.log10(ymax)))
xhi=10**np.ceil(np.log10(xmax))
ax.axis([xhi/(yhi/ylo),xhi,ylo,yhi])

try:
    ax=fig.add_subplot(122)
    ax.set_yscale('log')
    ax.set_xlabel('$m$')
    ax.set_ylabel('$r_{rup}$')
    ax.plot(mm_mean,r_peak,'o')
    ylo=10**np.floor(np.log10(np.min(r_peak)))
    yhi=10**np.ceil(np.log10(np.max(r_peak)))
    ax.set_ylim(ylo,yhi)
    #
    popt = np.polyfit( mm_mean,np.log10(r_peak),1 )
    sigma=popt[0]
    rc=10**popt[1] #10**np.ceil(np.log10(popt[1]))
    rc0=rc
    #
    ax.plot(mm_mean,func2( np.array(mm_mean),sigma,rc),'--r',
            label='$r_{rup}\propto 10^{%3.2f m},r_c=%2.1e$'%(sigma,rc))
    ax.legend(fontsize='xx-small',frameon=False)
    
    fig.tight_layout()
except:
    traceback.print_exc()
    pass
ax.set_title('$q=%d,d_f=%s$'%(quantile*100,Df),fontsize='xx-small')

plt.show()
#fig.savefig('%s/r_rupt.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight')
np.save('%s/sigma'%DIR_OUTPT_figs,sigma)

ValueErrorTraceback (most recent call last)
<ipython-input-68-2153610d652f> in <module>()
     69 (ylo,yhi)=(10**np.floor(np.log10(ymin)),           10**np.ceil(np.log10(ymax)))
     70 xhi=10**np.ceil(np.log10(xmax))
---> 71 ax.axis([xhi/(yhi/ylo),xhi,ylo,yhi])
     72 
     73 try:

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/matplotlib/axes/_base.pyc in axis(self, *v, **kwargs)
   1628             raise ValueError('v must contain [xmin xmax ymin ymax]')
   1629 
-> 1630         self.set_xlim([v[0], v[1]], emit=emit, auto=False)
   1631         self.set_ylim([v[2], v[3]], emit=emit, auto=False)
   1632 

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/matplotlib/axes/_base.pyc in set_xlim(self, left, right, emit, auto, **kw)
   2946 
   2947         self._process_unit_info(xdata=(left, right))
-> 2948         left = self._validate_converted_limits(left, self.convert_xunits)
   2949         right = self._validate_converted_limits(right, self.convert_xunits)
   2950 

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/matplotlib/axes/_base.pyc in _validate_converted_limits(self, limit, convert)
   2874                     (not np.isreal(converted_limit) or
   2875                         not np.isfinite(converted_limit))):
-> 2876                 raise ValueError("Axis limits cannot be NaN or Inf")
   2877             return converted_limit
   2878 

ValueError: Axis limits cannot be NaN or Inf

different realizations

In [69]:
alpha_arr=[] #---
for irun in xrange(16):
    path = 'runFillmore/Run%s'%irun
    a=np.load('%s/sigma.npy'%path)
    alpha_arr.append(a.item())
    #--- load image
    display(Image(filename='%s/r_rupt.png'%path))
df=pd.DataFrame({'mean':[np.mean(alpha_arr)], 'std_dev':np.std(alpha_arr),
                        'std_err':[np.std(alpha_arr)/len(alpha_arr)**.5]},
             index=[r'$\sigma$'])
display(df)
df['mean']+2*df['std_dev'],df['mean']-2*df['std_dev']
                    
                       

IOErrorTraceback (most recent call last)
<ipython-input-69-2576422cca50> in <module>()
      2 for irun in xrange(16):
      3     path = 'runFillmore/Run%s'%irun
----> 4     a=np.load('%s/sigma.npy'%path)
      5     alpha_arr.append(a.item())
      6     #--- load image

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/lib/npyio.pyc in load(file, mmap_mode, allow_pickle, fix_imports, encoding)
    370     own_fid = False
    371     if isinstance(file, basestring):
--> 372         fid = open(file, "rb")
    373         own_fid = True
    374     elif is_pathlib_path(file):

IOError: [Errno 2] No such file or directory: 'runFillmore/Run0/sigma.npy'
In [70]:
#--- density plots: rho(r) 2nd


plt.figure(figsize=(15,5))

#--- plot rho
plt.subplot(1,3,1)
plt.xscale('log')
plt.yscale('log')
plt.xlabel(r'$r$')
plt.ylabel(r'$P(r)$')
plt.xlim(1e-3,1e2)
plt.ylim(1e-4,1e1)
for key in X:
#    if key < 3:
#        continue
    plt.errorbar(X[key],rho_dict[key],
                 yerr=Err[key],
                 fmt='-o', markersize=8
                )

#--- rescaled in x and y
plt.subplot(1,3,2)
#sigma=sigma #0.5
plt.xscale('log')
plt.yscale('log')
plt.xlabel(r'$R=r/10^{%3.2f m}$'%sigma)
plt.ylabel(r'$P(R)=P(r)\times 10^{%3.2f m}$'%sigma)
plt.xlim(1e-5,1e0)
plt.ylim(1e-2,1e3)

xdata = []
ydata = []
y_err = []
for key in X:
#    if key > 2:
#        continue
    xdata += list(X[key]/10**(sigma*np.mean(m_range[key])))
    ydata += list(rho_dict[key]*10**(sigma*np.mean(m_range[key])))
    y_err += list(Err[key]*10**(sigma*m_mean[key]))
    plt.errorbar(X[key]/10**(sigma*m_mean[key]),
                 rho_dict[key]*10**(sigma*m_mean[key]),
                 yerr=Err[key]*10**(sigma*m_mean[key]),
                 label='%2.1f-%2.1f'%(m_range[key][0],m_range[key][1]),
                 fmt='-o', markersize=8
                )
plt.legend(title='m',loc='upper right',bbox_to_anchor=(0.4, .6),fontsize='x-small')

#--- rescaled by r^-\nu
nu = -2.0
plt.subplot(133)
plt.xscale('log')
plt.yscale('log')
plt.xlabel(r'$R$')
plt.ylabel(r'$P(R)\times R^{%3.2f}$'%nu)
#plt.ylabel(r'$\lambda(t) \times t^p$')
plt.xlim(1e-4,1e0)
#plt.ylim(1e-6,1e-4)
KEYS=X.keys()
KEYS.sort()
for key, index in zip(KEYS,xrange(100)):
#    if key < 3:
#        continue
    x_new = X[key]/10**(sigma*m_mean[key])
    y_new = rho_dict[key]*10**(sigma*m_mean[key])
    y2_new = Err[key]*10**(sigma*m_mean[key])
    plt.errorbar(x_new,
                 y_new/x_new**nu,
                 yerr=y2_new/x_new**nu, 
                 fmt='-o', markersize=8
                )
    
    
plt.tight_layout()
###plt.savefig('%s/rho_r.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight')
plt.show()

NameErrorTraceback (most recent call last)
<ipython-input-70-1353e9e5b34c> in <module>()
     25 plt.xscale('log')
     26 plt.yscale('log')
---> 27 plt.xlabel(r'$R=r/10^{%3.2f m}$'%sigma)
     28 plt.ylabel(r'$P(R)=P(r)\times 10^{%3.2f m}$'%sigma)
     29 plt.xlim(1e-5,1e0)

NameError: name 'sigma' is not defined

$\rho(r)\propto \frac{r^{\nu_1}}{1+(r/r_c)^{\nu_2}}$

In [71]:
from scipy.stats import chi2

#--- fitting

from scipy.optimize import curve_fit

def func(x,a,b1,b2,rc):
    return a*x**b1/(1.0+(x/rc)**b2)

sigma=0.5 #kam
pval_max = 0.0
for sigmaa in [sigma]: #np.linspace(0.2,0.5,16):
    #--- rescale
    x = []
    y = []
    ye = []
    for key in X:
#        if key > 1: #kam
#            continue

        x += list(X[key]/10**(sigmaa*np.mean(m_range[key])))
        y += list(rho_dict[key]*10**(sigmaa*np.mean(m_range[key])))
        ye += list(Err[key]*10**(sigmaa*m_mean[key]))

    #--- initialize 
    x = np.array(x)
    y = np.array(y)
    ye=np.array(ye)
    #
    ss=zip(x,y,ye)
    ss.sort()
    x=[i[0]for i in ss]
    y=[i[1]for i in ss]
    ye = [i[2] for i in ss]

    #--- fit
    popt0, pcov = curve_fit(func, x, y, 
                       p0=(1.0,Df-1,2.0,rc0),sigma=ye) #, bounds=((0.0,0.0), (np.inf,10.0)))

    #--- goodness of fit
    n=len(x)
    m=4 #--- number of coefficients
    xsq = np.sum(((y-func(x,*popt0))/ye)**2)
    pval=1-chi2.cdf(xsq,n-m)

    print pval
    if 1: #pval > pval_max:
        pval_max = pval
        xxdata = x[:]
        yydata = y[:]
        popt = popt0[:]
        sigma = sigmaa
        (a,b1,b2,rc)=popt0
        nu = -b2
        rho_pref = func(rc,a,b1,b2,rc)
    
#--- plot
fig = plt.figure(figsize=(4,4))
ax=fig.add_subplot(111)
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_xlabel('$r[km]$')
ax.set_ylabel(r'$\rho(r)$')
#ax.set_xlim(2,6)
#ax.set_ylim(1e-4,1e2)
ax.scatter(xxdata,yydata)


# #--- fit
ax.plot(xxdata,func(xxdata,*popt),
        '-.r',zorder=3,
        label=r'$\rho(r)\propto \frac{r^{%3.2f}}{1+(r/r_c)^{%3.2f}},r_c={%2.1e}$'%(b1,b2,rc))
ax.legend(fontsize='xx-small',frameon=False)
ax.set_title(r'$q=%d,df=%s,\sigma=%3.2f,p=%3.2e$'%(quantile*100,Df,sigma,pval_max),fontsize='xx-small')
#plt.savefig('%s/fit.png'%DIR_OUTPT_figs,dpi=150,bbox_inches='tight',transparent=True)
plt.show()
np.save('%s/nu'%DIR_OUTPT_figs,nu+b1)
np.save('%s/sigma_max'%DIR_OUTPT_figs,sigma)
np.save('%s/pval'%DIR_OUTPT_figs,pval_max)

NameErrorTraceback (most recent call last)
<ipython-input-71-903a4a9c16f8> in <module>()
     36     #--- fit
     37     popt0, pcov = curve_fit(func, x, y, 
---> 38                        p0=(1.0,Df-1,2.0,rc0),sigma=ye) #, bounds=((0.0,0.0), (np.inf,10.0)))
     39 
     40     #--- goodness of fit

NameError: name 'rc0' is not defined

different realizations

In [72]:
alpha_arr=[]
PVAL = []
SIGMA = []
for irun in xrange(16):
    a=np.load('runFillmore/Run%s/nu.npy'%(irun))
    b=np.load('runFillmore/Run%s/pval.npy'%(irun))
    c=np.load('runFillmore/Run%s/sigma_max.npy'%(irun))
#    print b.item()
    if b.item() < 0.001: #--- remove p < 0.001
        continue
    alpha_arr.append(a.item())
    if irun == 13: print a.item()
    PVAL.append([b.item(),irun])
    SIGMA.append(c.item())
df=pd.DataFrame({'mean':[np.mean(alpha_arr),np.mean(SIGMA)], 
                 'std_dev':[np.std(alpha_arr),np.std(SIGMA)],
                 'std_err':[np.std(alpha_arr)/len(alpha_arr)**.5,np.std(SIGMA)/len(SIGMA)**.5]},
                 index=[r'$\nu$',r'$\sigma$'])  
display(df)
print r'%2.1f<\nu<%2.1f'%((df['mean']+2*df['std_dev']).iloc[0],(df['mean']-2*df['std_dev']).iloc[0])

IOErrorTraceback (most recent call last)
<ipython-input-72-c9bc6293d9de> in <module>()
      3 SIGMA = []
      4 for irun in xrange(16):
----> 5     a=np.load('runFillmore/Run%s/nu.npy'%(irun))
      6     b=np.load('runFillmore/Run%s/pval.npy'%(irun))
      7     c=np.load('runFillmore/Run%s/sigma_max.npy'%(irun))

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/lib/npyio.pyc in load(file, mmap_mode, allow_pickle, fix_imports, encoding)
    370     own_fid = False
    371     if isinstance(file, basestring):
--> 372         fid = open(file, "rb")
    373         own_fid = True
    374     elif is_pathlib_path(file):

IOError: [Errno 2] No such file or directory: 'runFillmore/Run0/nu.npy'
In [73]:
PVAL.sort(reverse=True)
PVAL
Out[73]:
[]
In [74]:
#--- density plots: rho(r) (paper version)

import matplotlib as mpl
mpl.rcParams.update(mpl.rcParamsDefault)
import matplotlib.ticker
from matplotlib import rc as rcc

rcc('text', usetex=True)
font = {'size'   : 20}
matplotlib.rc('font', **font)

def returnCMtoInch(x,y):
    return (x/2.54,y/2.54)

fig, ax = plt.subplots(figsize=(4,4))
#sigma=0.3 #alpha





xdata = []
ydata = []
colors = ['black','red','green','blue','cyan','brown','violet','magenta','orange','yellow']
fillstyles=['white',None,'white',None,'white',None,'white',None,'white',None,'white',None,'white',None,'white',None]
markers=['o','s','D','^','<','>','v']
assert len(colors) >= len(X)
for key,col,mk,fl in zip(X,colors,markers,fillstyles):
    xdata += list(X[key]/10**(sigma*np.mean(m_range[key])))
    ydata += list(rho_dict[key]*10**(sigma*np.mean(m_range[key])))
    xlabel = '%2.1f-%2.1f'%(m_range[key][0],m_range[key][1])
    if key >= 4:
        xlabel = '%2.1f'%(m_mean[key])
    ax.errorbar(X[key]/10**(sigma*m_mean[key]),
                 rho_dict[key]*10**(sigma*m_mean[key]),
                 yerr=Err[key]*10**(sigma*m_mean[key]),
#                 label=xlabel,
                 fmt='-o', markersize=8,color=col,marker=mk,markerfacecolor=fl,markeredgewidth=0.7,
                linewidth=.5,
                 barsabove=None,capsize=5,capthick=1,elinewidth=1
                )

#(ylo,yhi)=(10**np.floor(np.log10(np.min(ydata))),\
#           10**np.ceil(np.log10(np.max(ydata))))
(ylo,yhi)=(np.min(ydata),\
           np.max(ydata))
yhi*=2
ylo/=2
xc=(np.min(xdata)*np.max(xdata))**.5
xlo = xc/(yhi/ylo)**.5
xhi = xc*(yhi/ylo)**.5
ax.axis([xlo,xhi,ylo,yhi])
ax.loglog()

#--- add major xticks
xmin=np.ceil(np.log10(ax.axis()[0]))
xmax=np.floor(np.log10(ax.axis()[1]))
nbin = xmax - xmin
ax.set_xticks(np.logspace(xmin,xmax,int(nbin)+1))

#--- put minor bins
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.xaxis.set_minor_locator(locmin)
ax.xaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())
#plt.xlabel(r'$R=r/10^{%s m}$'%alpha)
#plt.ylabel(r'$P(R)=P(r)\times 10^{%s m}$'%alpha)

#fig.legend(title='m',prop=fontP,loc='upper right',frameon=False)#, bbox_to_anchor=(0.4, .6))
ax.tick_params(axis='y',left=True, right=True,which='both')
ax.tick_params(axis='x',bottom=True, top=True,which='both')

#--- plot power-law
(xlo,xhi)=(rc,100*rc) 
yhi=rho_pref/10**.5
ax.plot([xlo,xhi],[yhi, yhi* (xhi/xlo)**(nu+b1)],'r-.',
#        label=r'$\rho(r)\propto r^{%3.2f},r_{rup}\propto 10^{%3.2f m}$'%(nu+b1,sigma),
        linewidth=1)

#--- rescaled
#ax.scatter(xdata,1e2*np.array(ydata)/np.array(xdata)**(nu+b1))

#ax.legend(fontsize='x-small',frameon=False,bbox_to_anchor=(2.0,1))
ax.set_xlabel(r'$r/r_{rup}$')
ax.set_title(r'$\rho(r)\propto r^{%3.2f},r_{rup}\propto 10^{%3.2f m}$'%(nu+b1,sigma),fontsize='x-small')

#ax.set_title('$q=%d,d_f=%s$'%(quantile*100,Df),fontsize='xx-small')
#ax.set_ylabel(r'$\rho(r)\times r_{rup}$')

#ax.set_title(r'$q=%d,df=%s,p=%3.2e$'%(quantile*100,Df,pval_max),fontsize='xx-small')

    
#DrawFrame(ax, (0.22,0.06),(0.14,0.06),0.1,LOG_X=True,LOG_Y=True)


#plt.tight_layout()
plt.savefig('%s/rho_rescaled.4.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight')
plt.show()

ValueErrorTraceback (most recent call last)
<ipython-input-74-0b24bc63a3d8> in <module>()
     43 #(ylo,yhi)=(10**np.floor(np.log10(np.min(ydata))),\
     44 #           10**np.ceil(np.log10(np.max(ydata))))
---> 45 (ylo,yhi)=(np.min(ydata),           np.max(ydata))
     46 yhi*=2
     47 ylo/=2

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/core/fromnumeric.pyc in amin(a, axis, out, keepdims)
   2418 
   2419     return _methods._amin(a, axis=axis,
-> 2420                           out=out, **kwargs)
   2421 
   2422 

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/core/_methods.pyc in _amin(a, axis, out, keepdims)
     27 
     28 def _amin(a, axis=None, out=None, keepdims=False):
---> 29     return umr_minimum(a, axis, None, out, keepdims)
     30 
     31 def _sum(a, axis=None, dtype=None, out=None, keepdims=False):

ValueError: zero-size array to reduction operation minimum which has no identity

different realizations

In [75]:
from IPython.display import Image

for irun in xrange(16):
    path = 'runFillmore/Run%s'%irun
    #--- load image
    display(Image(filename='%s/rho_rescaled.png'%path))                       

IOErrorTraceback (most recent call last)
<ipython-input-75-aa3798014bb1> in <module>()
      4     path = 'runFillmore/Run%s'%irun
      5     #--- load image
----> 6     display(Image(filename='%s/rho_rescaled.png'%path))

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/IPython/core/display.pyc in __init__(self, data, url, filename, format, embed, width, height, retina, unconfined, metadata)
    975         self.unconfined = unconfined
    976         self.metadata = metadata
--> 977         super(Image, self).__init__(data=data, url=url, filename=filename)
    978 
    979         if retina:

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/IPython/core/display.pyc in __init__(self, data, url, filename)
    611         self.filename = None if filename is None else unicode_type(filename)
    612 
--> 613         self.reload()
    614         self._check_data()
    615 

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/IPython/core/display.pyc in reload(self)
    997         """Reload the raw data from file or URL."""
    998         if self.embed:
--> 999             super(Image,self).reload()
   1000             if self.retina:
   1001                 self._retina_shape()

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/IPython/core/display.pyc in reload(self)
    629         """Reload the raw data from file or URL."""
    630         if self.filename is not None:
--> 631             with open(self.filename, self._read_flags) as f:
    632                 self.data = f.read()
    633         elif self.url is not None:

IOError: [Errno 2] No such file or directory: u'runFillmore/Run0/rho_rescaled.png'
In [76]:
#--- density plots: rho(r) (paper version)
from matplotlib import rc as rcc
import matplotlib as mpl
import matplotlib.ticker

mpl.rcParams.update(mpl.rcParamsDefault)
rcc('text', usetex=True)
font = {'size'   : 20}
matplotlib.rc('font', **font)

def returnCMtoInch(x,y):
    return (x/2.54,y/2.54)

fig, ax = plt.subplots(figsize=(4,4))
#sigma=alpha


xdata = []
ydata = []
y_err = []
colors = ['black','red','green','blue','cyan','brown','grey','magenta','orange','yellow']
fillstyles=['white',None,'white',None,'white',None,'white',None,'white',None,'white',None,'white',None,'white',None]
markers=['o','s','D','^','<','>','v']
assert len(colors) >= len(X)
for key,col,mk,fl in zip(X,colors,markers,fillstyles):
    xdata += list(X[key]/10**(sigma*np.mean(m_range[key])))
    ydata += list(rho_dict[key]*10**(sigma*np.mean(m_range[key])))
    y_err += list(Err[key]*10**(sigma*m_mean[key]))
    
    ax.errorbar(X[key]/10**(sigma*m_mean[key]),
                 rho_dict[key]*10**(sigma*m_mean[key]),
                 yerr=Err[key]*10**(sigma*m_mean[key]),
#                 label='%2.1f-%2.1f'%(m_range[key][0],m_range[key][1]),
                 fmt='-o', markersize=8,color=col,marker=mk,markerfacecolor=fl,markeredgewidth=0.7,
                linewidth=.5,
                 barsabove=None,capsize=5,capthick=1,elinewidth=1
                )
xdata=np.array(xdata)
ydata=np.array(ydata)
y_err=np.array(y_err)
(ylo,yhi)=(10**np.floor(np.log10(np.min(ydata-y_err))),\
#           10**np.ceil(np.log10(np.max(ydata))))
           np.max(ydata))
#(xlo,xhi)=(10**np.floor(np.log10(np.min(xdata))),\
#           10**np.ceil(np.log10(np.max(xdata))))
xc=(np.min(xdata)*np.max(xdata))**.5
xlo = xc/(yhi/ylo)**.5
xhi = xc*(yhi/ylo)**.5

ax.axis([xlo,xhi,ylo,yhi])
ax.loglog()

#--- add major xticks
xmin=np.ceil(np.log10(ax.axis()[0]))
xmax=np.floor(np.log10(ax.axis()[1])) #-1
nbin = xmax - xmin
ax.set_xticks(np.logspace(xmin,xmax,int(nbin)+1))

#--- put minor bins
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.xaxis.set_minor_locator(locmin)
ax.xaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())
#plt.xlabel(r'$R=r/10^{%s m}$'%alpha)
#plt.ylabel(r'$P(R)=P(r)\times 10^{%s m}$'%alpha)

#fig.legend(title='m',prop=fontP,loc='upper right',frameon=False)#, bbox_to_anchor=(0.4, .6))
ax.tick_params(axis='y',left=True, right=True,which='both')
ax.tick_params(axis='x',bottom=True, top=True,which='both')

#--- plot power-law
(xlo,xhi)=(rc,100*rc) 
yhi=rho_pref/10**.5
ax.plot([xlo,xhi],[yhi, yhi* (xhi/xlo)**(nu+b1)],'r-.',
#        label=r'$\rho(r)\propto r^{%3.2f},r_{rup}\propto 10^{%3.2f m}$'%(nu+b1,sigma),
        linewidth=1)

#ax.legend(fontsize='x-small',frameon=False)
#ax.set_xlabel(r'$r/r_{rup}$')
#ax.set_ylabel(r'$\rho(r)\times r_{rup}$')

#ax.set_title('q=%d%%,df=%s'%(quantile*100,Df),fontsize='xx-small')

    
DrawFrame(ax, (0.22,0.06),(0.14,0.06),0.1,LOG_X=True,LOG_Y=True)


#plt.tight_layout()
plt.savefig('%s/rho_r_2nd.png'%DIR_OUTPT_figs,dpi=300,bbox_inches='tight')
plt.show()

ValueErrorTraceback (most recent call last)
<ipython-input-76-8f59c7b7276f> in <module>()
     39 ydata=np.array(ydata)
     40 y_err=np.array(y_err)
---> 41 (ylo,yhi)=(10**np.floor(np.log10(np.min(ydata-y_err))),#           10**np.ceil(np.log10(np.max(ydata))))
     42            np.max(ydata))
     43 #(xlo,xhi)=(10**np.floor(np.log10(np.min(xdata))),\

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/core/fromnumeric.pyc in amin(a, axis, out, keepdims)
   2418 
   2419     return _methods._amin(a, axis=axis,
-> 2420                           out=out, **kwargs)
   2421 
   2422 

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/core/_methods.pyc in _amin(a, axis, out, keepdims)
     27 
     28 def _amin(a, axis=None, out=None, keepdims=False):
---> 29     return umr_minimum(a, axis, None, out, keepdims)
     30 
     31 def _sum(a, axis=None, dtype=None, out=None, keepdims=False):

ValueError: zero-size array to reduction operation minimum which has no identity
In [77]:
#--- density plots: rho(r) (paper version 2nd)

def returnCMtoInch(x,y):
    return (x/2.54,y/2.54)

fig, ax = plt.subplots(figsize=(4,4))
#---
ax.spines['right'].set_visible(False) #--- half open
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
#---
#ax.axis([1e-3,1e2,1e-4,1e1])
ax.loglog()
#ax.set_xticks(np.logspace(-1,3,4))




#plt.xlabel(r'$R=r/10^{%s m}$'%alpha)
#plt.ylabel(r'$P(R)=P(r)\times 10^{%s m}$'%alpha)
#plt.xlim(1e-5,1e0)
#plt.ylim(1e-5,1e0)

#colors = ['black','red','green','blue','cyan','brown']
#fillstyles=['white',None,'white',None,'white',None,'white']
#markers=['o','s','D','^','<','v']
#assert len(colors) >= len(X)
xmin = ymin = sys.maxint
ymax=xmax=-sys.maxint
for key,col,mk,fl in zip(X,colors,markers,fillstyles):
    if np.max(X[key]) > xmax:
        xmax = np.max(X[key])
    if np.min(rho_dict[key]) < ymin:
        ymin = np.min(rho_dict[key])
    if np.max(rho_dict[key]) > ymax:
        ymax = np.max(rho_dict[key])
    ax.errorbar(X[key],
                 rho_dict[key],
                 yerr=Err[key],
                 label='%2.1f-%2.1f'%(m_range[key][0],m_range[key][1]),
                 fmt='-o', markersize=8,color=col,marker=mk,markerfacecolor=fl,markeredgewidth=0.7,
                linewidth=.5,
                 barsabove=None,capsize=5,capthick=1,elinewidth=1
                )

(ylo,yhi)=(10**np.floor(np.log10(ymin)),\
           10**np.ceil(np.log10(ymax)))
xhi=10**np.ceil(np.log10(xmax))
ax.axis([xhi/(yhi/ylo),xhi,ylo,yhi])

#--- add major xticks
xmin=np.ceil(np.log10(ax.axis()[0]))
xmax=np.floor(np.log10(ax.axis()[1]))
nbin = xmax - xmin
ax.set_xticks(np.logspace(xmin,xmax,int(nbin)+1))

#--- put minor bins
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.xaxis.set_minor_locator(locmin)
ax.xaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())

    
#fig.legend(title='m',prop=fontP,loc='lower left',frameon=False)#, bbox_to_anchor=(0.4, .6))
#ax.tick_params(axis='y',left=True, right=True,which='both')
#ax.tick_params(axis='x',bottom=True, top=True,which='both')
DrawFrame(ax, (0.22,0.06),(0.14,0.06),0.01,LOG_X=True,LOG_Y=True)


#plt.tight_layout()
plt.savefig('%s/rho_r.png'%DIR_OUTPT_figs,dpi=300,bbox_inches='tight',transparent=True)
plt.show()

ValueErrorTraceback (most recent call last)
<ipython-input-77-11c69f0c9727> in <module>()
     47 (ylo,yhi)=(10**np.floor(np.log10(ymin)),           10**np.ceil(np.log10(ymax)))
     48 xhi=10**np.ceil(np.log10(xmax))
---> 49 ax.axis([xhi/(yhi/ylo),xhi,ylo,yhi])
     50 
     51 #--- add major xticks

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/matplotlib/axes/_base.pyc in axis(self, *v, **kwargs)
   1628             raise ValueError('v must contain [xmin xmax ymin ymax]')
   1629 
-> 1630         self.set_xlim([v[0], v[1]], emit=emit, auto=False)
   1631         self.set_ylim([v[2], v[3]], emit=emit, auto=False)
   1632 

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/matplotlib/axes/_base.pyc in set_xlim(self, left, right, emit, auto, **kw)
   2946 
   2947         self._process_unit_info(xdata=(left, right))
-> 2948         left = self._validate_converted_limits(left, self.convert_xunits)
   2949         right = self._validate_converted_limits(right, self.convert_xunits)
   2950 

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/matplotlib/axes/_base.pyc in _validate_converted_limits(self, limit, convert)
   2874                     (not np.isreal(converted_limit) or
   2875                         not np.isfinite(converted_limit))):
-> 2876                 raise ValueError("Axis limits cannot be NaN or Inf")
   2877             return converted_limit
   2878 

ValueError: Axis limits cannot be NaN or Inf
In [78]:
#--- density plots: rho(r) (paper version 2nd)

#plt.style.use('seaborn')
#plt.rcParams.update({'lines.markeredgewidth': 1})

def returnCMtoInch(x,y):
    return (x/2.54,y/2.54)

fig, ax = plt.subplots(figsize=(4,4))
#---
ax.spines['right'].set_visible(False) #--- half open
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
#---
ax.axis([40/(20/5e-4),40,5e-4,20])
ax.loglog()
#ax.set_xticks(np.logspace(-1,3,4))

#--- add major xticks
xmin=np.ceil(np.log10(ax.axis()[0]))
xmax=np.floor(np.log10(ax.axis()[1]))
nbin = xmax - xmin
ax.set_xticks(np.logspace(xmin,xmax,int(nbin)+1))

#--- put minor bins
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.xaxis.set_minor_locator(locmin)
ax.xaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())

#--- add major yticks
ymin=np.ceil(np.log10(ax.axis()[2]))
ymax=np.floor(np.log10(ax.axis()[3]))
nbin = ymax - ymin
ax.set_yticks(np.logspace(ymin,ymax,int(nbin)+1))
#--- put minor bins
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.yaxis.set_minor_locator(locmin)
ax.yaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())



ax.set_xlabel(r'$r(km)$')
ax.set_ylabel(r'$\rho(r)$')
#plt.xlim(1e-5,1e0)
#plt.ylim(1e-5,1e0)

#colors = ['black','red','green','blue','cyan','brown']
#fillstyles=['white',None,'white',None,'white',None,'white']
#markers=['o','s','D','^','<','v']
#assert len(colors) >= len(X)
for key,col,mk,fl in zip(X,colors,markers,fillstyles):
    xlabel = r'$%2.1f-%2.1f$'%(m_range[key][0],m_range[key][1])
    if key == 5:
        xlabel = r'$6.4$'
    if key == 7:
        xlabel = r'$7.1$'
    #    if key ==
    ax.errorbar(X[key],
                 rho_dict[key],
                 yerr=Err[key],
                 label=xlabel,
                 fmt='-o', markersize=8,color=col,marker=mk,markerfacecolor=fl,markeredgewidth=0.7,
                linewidth=.5,
                 barsabove=None,capsize=5,capthick=1,elinewidth=1
                )

    ax.legend(title=r'$m$',prop=fontP,loc='lower left',frameon=False,
              fontsize ='xx-small',markerscale=1,bbox_to_anchor=(1, 0))

    
nu=-1.4
ax.plot([1e-1,100],[50, 50* (100/1e-1)**nu],'r-.',linewidth=1)    
    
#ax.tick_params(axis='y',left=True, right=True,which='both')
#ax.tick_params(axis='x',bottom=True, top=True,which='both')
DrawFrame(ax, (0.22,0.06),(0.14,0.06),0.1,LOG_X=True,LOG_Y=True)


#plt.tight_layout()
#plt.savefig('%s/rho_r.png'%DIR_OUTPT_figs,dpi=300,bbox_inches='tight',transparent=True)
In [79]:
from scipy.optimize import curve_fit

def func(x,pref, xmean,sigma):
    return pref * np.exp(-((x-xmean)/sigma)**2)

fig=plt.figure(figsize=(8,4))
ax=fig.add_subplot(121)
ax.axis([2e-3,2e-3*1e1/1e-3,1e-3,1e1])
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_xlabel('$r$')
ax.set_ylabel('$P(r)$')


r_peak={}
mm_mean={}
#colors = ['black','red','green','blue','cyan','brown']
#assert len(colors) >= len(X)
p0 = (1.0,0.1,.01)
for key, col in zip(X,colors):
    try:
#        print np.mean(X[key])
        ax.errorbar(X[key],rho_dict[key], 
                     yerr=Err[key],
                    linestyle='None', color=col,
                     fmt='-o', markersize=8
                    )            
        popt, pcov = curve_fit(func, X[key], rho_dict[key],
                               sigma=Err[key], 
                               p0=p0) #, bounds=((0.0,0.0), (np.inf,10.0)))
        mm_mean[key]=m_mean[key]
        p0 = popt[:]
#        print p0
        r_peak[key] = popt[1]
    #    if key == 5:
        ax.plot(X[key], func(X[key], *popt),'-',color=col)#,label='fit: r=%3.2f' % (popt[1]))
        ax.legend()
    except:
        continue

ax=fig.add_subplot(122)
ax.set_yscale('log')
ax.set_xlabel('$m$')
ax.set_ylabel('$r$')
#ax.set_ylim(1,1e3)
ax.plot(mm_mean.values(),r_peak.values(),'o')
#
#
def func(x,c,alpha):
    return c*10**(alpha*x)
#
popt, pconv = curve_fit( func, mm_mean.values(),r_peak.values() )
#
ax.plot(mm_mean.values(),func( np.array(mm_mean.values()),*popt),'--r')
print popt
print pconv**0.5

fig.tight_layout()

TypeErrorTraceback (most recent call last)
<ipython-input-79-9f31cc10a9cd> in <module>()
     50     return c*10**(alpha*x)
     51 #
---> 52 popt, pconv = curve_fit( func, mm_mean.values(),r_peak.values() )
     53 #
     54 ax.plot(mm_mean.values(),func( np.array(mm_mean.values()),*popt),'--r')

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/scipy/optimize/minpack.pyc in curve_fit(f, xdata, ydata, p0, sigma, absolute_sigma, check_finite, bounds, method, jac, **kwargs)
    740         # Remove full_output from kwargs, otherwise we're passing it in twice.
    741         return_full = kwargs.pop('full_output', False)
--> 742         res = leastsq(func, p0, Dfun=jac, full_output=1, **kwargs)
    743         popt, pcov, infodict, errmsg, ier = res
    744         cost = np.sum(infodict['fvec'] ** 2)

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/scipy/optimize/minpack.pyc in leastsq(func, x0, args, Dfun, full_output, col_deriv, ftol, xtol, gtol, maxfev, epsfcn, factor, diag)
    378     m = shape[0]
    379     if n > m:
--> 380         raise TypeError('Improper input: N=%s must not exceed M=%s' % (n, m))
    381     if epsfcn is None:
    382         epsfcn = finfo(dtype).eps

TypeError: Improper input: N=2 must not exceed M=0
In [80]:
#--- fitting

from scipy.optimize import curve_fit

def func(x,a,b):
    return a/x**b

xdata = np.array(xdata)
xxdata = xdata[xdata>2e-2]
ydata = np.array(ydata)
yydata = ydata[xdata>2e-2]

popt, pcov = curve_fit(func, xxdata, yydata, p0=(1.0,1.0), bounds=((0.0,0.0), (np.inf,10.0)))

plt.figure(figsize=(5,5))
plt.xscale('log')
plt.yscale('log')
#plt.xlim(1e-1,1e2)
#plt.ylim(1e-4,1e-1)
plt.plot(xdata, ydata,marker='o',linestyle='None')
plt.plot(xdata, func(xdata, *popt),'r-',label='fit: a=%3.2f' % popt[1])
plt.legend()         
#plt.plot(xdata, func(xdata, *popt), 'g--',
#         label='fit: a=%5.3f, b=%5.3f' % tuple(popt))

ValueErrorTraceback (most recent call last)
<ipython-input-80-6da26173dfee> in <module>()
     11 yydata = ydata[xdata>2e-2]
     12 
---> 13 popt, pcov = curve_fit(func, xxdata, yydata, p0=(1.0,1.0), bounds=((0.0,0.0), (np.inf,10.0)))
     14 
     15 plt.figure(figsize=(5,5))

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/scipy/optimize/minpack.pyc in curve_fit(f, xdata, ydata, p0, sigma, absolute_sigma, check_finite, bounds, method, jac, **kwargs)
    760 
    761         # Do Moore-Penrose inverse discarding zero singular values.
--> 762         _, s, VT = svd(res.jac, full_matrices=False)
    763         threshold = np.finfo(float).eps * max(res.jac.shape) * s[0]
    764         s = s[s > threshold]

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/scipy/linalg/decomp_svd.pyc in svd(a, full_matrices, compute_uv, overwrite_a, check_finite, lapack_driver)
    123     # compute optimal lwork
    124     lwork = _compute_lwork(gesXd_lwork, a1.shape[0], a1.shape[1],
--> 125                            compute_uv=compute_uv, full_matrices=full_matrices)
    126 
    127     # perform decomposition

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/scipy/linalg/lapack.pyc in _compute_lwork(routine, *args, **kwargs)
    591     if info != 0:
    592         raise ValueError("Internal work array size computation failed: "
--> 593                          "%d" % (info,))
    594 
    595     lwork = [w.real for w in wi[:-1]]

ValueError: Internal work array size computation failed: -5
In [81]:
#--- choose bins for rho and lambda

import traceback

#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)



#--- remove pairs with r > cs.t
#cs = 3 * (24*3600) #--- km per day
df_trig = nij_trig.copy()

#--- parents' magnitudes
m_list = GetParentsMagnitudes( swarm_lohi, df_trig )

#--- number of daughters for each mother
df_nn = df_trig.groupby(by='Parent_id').count()['Event_id']


#--- include daughters with zero children
list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)


#--- scattered plot
fig = plt.figure(figsize=(4,4),frameon=True)
ax = fig.add_subplot(111)
#ax.set_xlim(mc,swarm_lohi['magnitude'].max()+0.5)
m_min = min(m_list)-1.0e-6
m_max = max(m_list)+1.0e-6 #--- exclude the largest event
ax.set_xlim(m_min,np.ceil(m_max))
ax.set_ylim(0.1,1000)
ax.plot( swarm_lohi['magnitude'].loc[df_nn.index],
           df_nn,
          'o',color='black',alpha=.1)
ax.plot( swarm_lohi['magnitude'].loc[df_zeros.index],
         df_zeros,
          'x',color='gray')

plt.yscale('log')
#plt.xlabel('$m$')
#plt.ylabel(r'$N_{as}$')

#--- choose m0<m<m1
dm = 0.75 #0.5
n = int( (6.0 - m_min ) / dm )
m = list(np.linspace(m_min,6.0,n))
#m=AdjustBins(m_list,m)
# m=[2.7999989999999997,
#  3.133332555555555,
#  3.466666111111111,
#  3.7999996666666664,
#  4.133333222222222,
#  4.466666777777778,
#  4.800000333333333,
#  5.800001]
m+=[6.41,7.1-0.01,7.1+0.01]
Mlist=m

m_mean=np.array(np.zeros(len(m) - 1))
m_err=np.array(np.zeros(len(m) - 1))
N_as = np.array(np.zeros(len(m) - 1))
err_N = np.array(np.zeros(len(m) - 1))
for i in xrange( len(m) - 1 ):
    try:
        m0 = m[ i ]
        m1 = m[ i + 1 ]
        df, m_mean[ i ], m_err[i] = GetPairsWithSpecifiedParentMag((m0,m1),
                                                         swarm_lohi,df_trig) #--- get parent with m0<m<m1
        df.reset_index( inplace = True, drop = True )
#        pdb.set_trace()
        #--- number of daughters for each mother
        df_n = df.groupby(by='Parent_id').count()['Event_id']
    
        #--- include daughters with zero children
        list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
        subset = swarm_lohi.loc[list_of_zero_afs]
        list_of_zero_afs = subset[(subset['magnitude'] >= m0) & (subset['magnitude'] <= m1)].index    
        df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)
        #--- mean
#        print [m0,m1],list_of_zero_afs
#        pdb.set_trace()
        
        N_as[ i ] = 1.0*(df_n.sum()+ df_zeros.sum())/(len(df_n)+len(df_zeros))
        err_N[ i ] = (N_as[ i ]/(len(df_n)+len(df_zeros)))**0.5 #--- poisson
#        err_N[ i ]=np.std(df_n.tolist()+df_zeros.tolist())/(len(df_n)+len(df_zeros))**0.5
    except:
#        traceback.print_exc()
        continue
    
#ax.plot(m, np.append(N_as,N_as[-1]), linestyle='-', drawstyle='steps-post',color='red')
ax.errorbar(m_mean,N_as,
            yerr=err_N,xerr=m_err,
            markerfacecolor='white',
            fmt='o', markersize=8,color='red',markeredgewidth=0.7,
            linewidth=.5,
            barsabove=None,capsize=5,capthick=1,elinewidth=1)


#--- add major xticks
xmin=np.ceil(ax.axis()[0])
xmax=np.floor(ax.axis()[1])
nbin = xmax - xmin
ax.set_xticks(np.linspace(xmin,xmax,int(nbin)+1))

DrawFrame(ax, (0.15,0.08),(0.14,0.06),0.1,LOG_Y=True)

try:
    from scipy.optimize import curve_fit
    def func(x,c,alpha):
        return c*10**(alpha*x)
    popt, pconv = curve_fit(func,m_mean[m_mean>=mc],N_as[m_mean>=mc])
    
    mm = swarm_lohi['magnitude'].loc[df_zeros.index].tolist() + \
         swarm_lohi['magnitude'].loc[df_nn.index].tolist() #swarm_lohi[swarm_lohi['magnitude']>=mc]['magnitude']
    mm = np.array(mm)
    mm=list(set(mm))
    mm.sort()
    mm=np.array(mm)
    ax.plot(mm,func(mm,*popt),'-.g')
    print popt
    print pconv**0.5
except:
    pass

#plt.savefig('%s/prod.png'%DIR_OUTPT_figs,dpi=150,bbox_inches='tight')


fig = plt.figure(figsize=(4,4),frameon=True)
ax = fig.add_subplot(111)
ax.plot(m, np.append(N_as,N_as[-1]), linestyle='-', drawstyle='steps-post',color='red')
ax.errorbar(m_mean,N_as,
            yerr=err_N,xerr=m_err,
            markerfacecolor='white',
            fmt='o', markersize=8,color='red',markeredgewidth=0.7,
            linewidth=.5,
            barsabove=None,capsize=5,capthick=1,elinewidth=1)
ax.set_xlim(m_min,np.ceil(m_max))
ax.set_ylim(0.1,1000)
ax.set_yscale('log')
ax.set_xticks(np.linspace(xmin,xmax,int(nbin)+1))
plt.show()
[0.21956281 0.17631794]
[[0.11981937        nan]
 [       nan 0.04908487]]
In [82]:
#--- omori relation

import numpy as np
import math
from scipy.optimize import curve_fit

#def func(x,c,p,A):#,B):
#    return A/(1+x/c)**p

def func(x,c,p,A,c2,p2):#,B):
    return A/(1+(x/c)**p+(x/c2)**p2)


#def func(x,c,p):#,B):
#    return (p-1)/c/(1+x/c)**p



#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi.reset_index(inplace=True,drop=True)

popt={}
pcov={}
perr = {}

##--- parents' magnitudes
#m_list = GetParentsMagnitudes( swarm_lohi, df_trig )
#m_min = min(m_list)-1.0e-6
#m_max = max(m_list)+1.0e-6 #--- exclude the largest event
#
##--- choose m0<m<m1
#dm = 0.3
#n = int( ( m_max - m_min ) / dm )
#m = list(np.linspace(m_min,m_max,n))
#m=AdjustBins(m_list,m)
#Mlist=m



X={}
rho_dict={}
#m_range={}
Err={}
m_mean=np.array(np.zeros(len(m)-1))
Xedges={}
nrows=len(m)-1;ncols=2
fig=plt.figure(figsize=(ncols*4,nrows*4))


for i in xrange( len(m) - 1 ):
#    if i == (len(m) - 2):
#        pdb.set_trace()
    try:
        m0 = m[ i ]
        m1 = m[ i + 1 ]
        print (m0,m1)
        df_trig,m_mean[ i ],junk = GetPairsWithSpecifiedParentMag((m0,m1),swarm_lohi,nij_trig) #--- get parent with m0<m<m1
        df_trig.reset_index( inplace = True, drop = True )
#        print "# of mother events:%s"%len(df_trig.groupby(by="Parent_id"))
    
        #--- number of daughters for each mother
        df_n = df_trig.groupby(by='Parent_id').count()['Event_id']
        
        #--- include daughters with zero children
        list_of_zero_afs = set(df_trig['Event_id'])-set(df_trig['Parent_id'])
        subset = swarm_lohi.loc[list_of_zero_afs]
        list_of_zero_afs = subset[(subset['magnitude'] >= m0) & (subset['magnitude'] <= m1)].index    
        df_zeros = pd.Series(np.zeros(len(list_of_zero_afs)),index=list_of_zero_afs)
        #--- mean
        N_as = 1.0*(df_n.sum()+ df_zeros.sum())/(len(df_n)+len(df_zeros))
#        print 'N_as=%s'%N_as
        #--- add distance & time column
    #    df_trig['R'] = rmat[df_trig['Parent_id'],df_trig['Event_id']]
    #    df_trig['T'] = tmat[df_trig['Parent_id'],df_trig['Event_id']]
    #    assert len ( df_trig[ df_trig['R'] == 0.0 ] ) == 0, '%s'%display( df_trig[ df_trig['R'] == 0.0 ] )
        
        #--- remove pairs with r > cs.t
    #    cs = 3 * (24*3600) #--- km per day
    #    df_trig = RemovePair( df_trig, cs )
    
        #--- rho plots
        n_decades = int( math.ceil( log(df_trig['T'].max()/df_trig['T'].min(),10) ) )
        nbin_per_decade = 4
        nbins =  nbin_per_decade * n_decades
        bins = np.logspace(log(df_trig['T'].min(),10)-1.e-10,
                                                  log(df_trig['T'].max(),10)+1.e-10,nbins)
        bins = AdjustBins(df_trig['T'],bins)
    
        rho, xedges=np.histogram(df_trig['T'], #--- density
                                 density=True,
                                 bins=bins)
        rho *= N_as #--- int rho = N_as
        hist, xedges=np.histogram(df_trig['T'], #--- histogram
                                  bins=bins)
    
        x_bins, junk=np.histogram(df_trig['T'], #--- histogram
                                  bins=bins,
                                 weights = df_trig['T'])
    
        #--- rm empty bins
        x_bins = x_bins[ hist != 0 ]
        rho = rho[ hist != 0 ]
        hist = hist[ hist != 0 ]
        
        #--- poisson err 
        err = rho/np.sqrt(hist)
        x_bins /= hist
    
        #---- fit    
    #    fit = plfit.plfit( np.array(df_trig['T']) ) #--- clauset
    #    cval = fit[1] #--- completeness mag.
    #    pval = fit[0]
    #    [pval, gof] = plpva.plpva(np.array(df_trig['T']), cval, 'xmin', cval,'silent')
    #    print pval
        
        #--- 2nd fit
        try:
            popt[i], pcov[i] = curve_fit(func, x_bins, rho, 
                                   p0=(1.0e-3,1.5,1e3,1e0,2.0), 
                                   bounds=((1.0e-6,0.1,1e-5,1e-6,0.1), (np.inf,7,np.inf,np.inf,7)), #c,p,A,c2,p2
                                   sigma=err #rho**0.5 #err
        #                           absolute_sigma = True,
        #                          method='trf'#'dogbox'
                                  )
            perr[i] = np.sqrt(np.diag(pcov[i]))
        except:
            pass
        #--- scattered plot
        ax=fig.add_subplot(nrows,ncols,2*i+1)
        ax.set_title(r'$%2.1f<m<%2.1f$'%(m0,m1))
        ax.set_xscale('log')
        ax.set_yscale('log')
        ax.set_xlabel('$t$')
        ax.set_ylabel('$r$')
        #plt.xlim(1e-3,100)
        #plt.ylim(1e-3,1e2)
        ax.scatter(df_trig['T'],df_trig['R'],alpha=0.1)
        
        #--- discretization effects
        dt_min=df_trig['T'].min()
        dt_max=df_trig['T'].max()
        dr_min=df_trig['R'].min()
        dr_max=df_trig['R'].max()
        
        ax.plot([dt_min,dt_min],[dr_min,dr_max],
                 '-',color='blue')
        ax.plot([dt_max,dt_max],[dr_min,dr_max],
                 '-',color='blue')
        ax.plot([dt_min,dt_max],[dr_min,dr_min],
                 '-',color='blue')
        ax.plot([dt_min,dt_max],[dr_max,dr_max],
                 '-',color='blue')
        
        ax.set_ylim(dr_min/2,dr_max*2)
        ax.plot([dt_min,dt_max],[dt_min*cs,dt_max*cs],
                 '-',color='red')
        
        #--- rho
        ax=fig.add_subplot(nrows,ncols,2*i+2)
        ax.set_xlim(1e-4,1e2)
    #    ax.set_ylim(1e-3,1e3)
        ax.set_xscale('log')
        ax.set_yscale('log')
        ax.set_xlabel('$t$')
        ax.set_ylabel('$\lambda(t)$')
        ax.errorbar(x_bins,rho, 
                     yerr=err,
                    linestyle='None',
                     fmt='-o', markersize=8
                    )
        
        #--- plot the fit
    #    plt.plot(x_bins,
    #             (pval-1)/cval/(1+x_bins/cval)**pval,
    #             label='fit: p=%2.1f, c=%4.3f' % (pval,cval)
    #            )
        #--- 2nd fit
        try:
            ax.plot(x_bins, func(x_bins, *popt[i]),'r-',label='fit: p0=%3.2f, p1=%3.2f' % (popt[i][1],popt[i][4]))
        except:
            traceback.print_exc()
            pass
        fig.tight_layout()
        ax.legend(fontsize='x-small') #prop=fontP)#bbox_to_anchor=(1.5, 0.5))
        
        X[i] = x_bins[:]
        Xedges[i]=xedges
        rho_dict[i]=rho[:]
    #    m_range[i]=np.array([m0,m1])
        Err[i]=err[:]
    except:
        continue
#fig.savefig('%s/rho_t.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight')
plt.show()
(1.999999, 2.99999925)
(2.99999925, 3.9999995000000004)
(3.9999995000000004, 4.99999975)
(4.99999975, 6.0)
(6.0, 6.41)
(6.41, 7.09)
(7.09, 7.109999999999999)
<matplotlib.figure.Figure at 0x7fcc49cde990>
In [83]:
KEYS=popt.keys()
KEYS.sort()
M_Mean=[]
pval=[]
cval = []
cerr = []
aval = []
aerr = []
mm=[]
pperr=[]
AVAL = {}
fig = plt.figure(figsize=(12,4))
ax=fig.add_subplot(131)
for ikey in KEYS: #--- exclude the last point
    pval.append( min(popt[ikey][1],popt[ikey][4]) )
    cval.append( min(popt[ikey][0],popt[ikey][3]) )
    aval.append( popt[ikey][2] )
    AVAL[ikey]=popt[ikey][2]
    aerr.append(perr[ikey][2])
    index = min([popt[ikey][1],1],[popt[ikey][4],4])[1]
    pperr.append(perr[ikey][index])
    cerr.append(perr[ikey][index-1])
    M_Mean.append(m_mean[ikey])
ax.errorbar(M_Mean,pval,yerr=pperr,
            marker='o',color='black')
#ax.set_xlim(0,4)
ax.set_ylim(0,3)
ax.set_xlabel('$m$')
ax.set_ylabel('$p$')
ax.set_title('nth=%2.1e'%n_thresh)
#ax.plot([0,3],[1.29,1.29],'--r')
#ax.plot([0,3],[1.37,1.37],'--r')

#--- plot c 
def func(x,a,b):
    return a*10**(x*b)
poptt, pcovv = curve_fit(func, M_Mean,cval, sigma=cerr)
print poptt

ax2=fig.add_subplot(132)
ax2.set_ylim(1e-4,1e-1)
#ax2.set_xlim(0,4)
ax2.set_yscale('log')
ax2.set_ylabel('$c$')
ax2.set_xlabel('$m$')
ax2.errorbar(M_Mean,cval,yerr=cerr,
            marker='o',color='black')

try:
    ax2.plot(M_Mean,func(M_Mean,*poptt),'--r')
    
    #--- plot A
    poptt, pcovv = curve_fit(func, M_Mean,aval, sigma=aerr)
    ax3=fig.add_subplot(133)
    #ax3.set_ylim(1e2,1e4)
    #ax3.set_xlim(0,4)
    ax3.set_yscale('log')
    ax3.set_ylabel('$A$')
    ax3.set_xlabel('$m$')
    ax3.errorbar(M_Mean,aval,yerr=aerr,
                marker='o',color='black')
    ax3.plot(m_mean,func(M_Mean,*poptt),'--r')
    print poptt
except:
    pass

fig.tight_layout()
#plt.savefig('%s/p.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight')
plt.show()

TypeErrorTraceback (most recent call last)
<ipython-input-83-62f8f5b9d024> in <module>()
     35 def func(x,a,b):
     36     return a*10**(x*b)
---> 37 poptt, pcovv = curve_fit(func, M_Mean,cval, sigma=cerr)
     38 print poptt
     39 

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/scipy/optimize/minpack.pyc in curve_fit(f, xdata, ydata, p0, sigma, absolute_sigma, check_finite, bounds, method, jac, **kwargs)
    740         # Remove full_output from kwargs, otherwise we're passing it in twice.
    741         return_full = kwargs.pop('full_output', False)
--> 742         res = leastsq(func, p0, Dfun=jac, full_output=1, **kwargs)
    743         popt, pcov, infodict, errmsg, ier = res
    744         cost = np.sum(infodict['fvec'] ** 2)

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/scipy/optimize/minpack.pyc in leastsq(func, x0, args, Dfun, full_output, col_deriv, ftol, xtol, gtol, maxfev, epsfcn, factor, diag)
    378     m = shape[0]
    379     if n > m:
--> 380         raise TypeError('Improper input: N=%s must not exceed M=%s' % (n, m))
    381     if epsfcn is None:
    382         epsfcn = finfo(dtype).eps

TypeError: Improper input: N=2 must not exceed M=0
In [84]:
#--- omori relation


plt.figure(figsize=(16.5,5))


#--- unrescaled
plt.subplot(131)
plt.xscale('log')
plt.yscale('log')
plt.xlabel('$t$')
plt.ylabel('$\lambda(t)$')
#plt.xlim(1e-6,1e1)
#plt.ylim(1e0,1e7)
KEYS = X.keys()
KEYS.sort()
for key, index in zip(KEYS,xrange(100)):
    plt.errorbar(X[key],rho_dict[key],yerr=Err[key], 
#                label='%2.1f-%2.1f'%(m_range[key][0],m_range[key][1]),

                 fmt='-o', markersize=8
                )
#plt.legend(title='m',fontsize='small',loc='upper right',bbox_to_anchor=(0.4, .65),frameon=False)

xdata = []
ydata = []
y_err = []
#--- rescaled
alpha_c = 0.0 #0.5
#alpha_p = 0.43 #--- productivity
alpha_y = alpha_c - alpha_p

plt.subplot(132)
plt.xscale('log')
plt.yscale('log')
plt.xlabel(r'$t/10^{%3.2f\times m}$'%alpha_c)
plt.ylabel(r'$\lambda(t) \times 10^{(%3.2f-%3.2f)m}$'%(alpha_c,alpha_p))
#plt.ylabel(r'$\lambda(t) \times t^p$')
#plt.xlim(1e-6,1e0)
#plt.ylim(1e-3,1e5)
aval=AVAL
for key, index in zip(KEYS,xrange(100)):
    try:
    #    xdata += list(X[key]/10**(alpha_c*m_mean[key]))
    #    print key, aval[key]
        xdata += list(X[key]/aval[key])
    #    ydata += list(rho_dict[key]*10**(alpha_y*m_mean[key]))
        ydata += list(rho_dict[key]*aval[key]*10**(alpha_y*m_mean[key]))
    #    y_err += list(Err[key]*10**(alpha_y*m_mean[key]))
        y_err += list(Err[key]*aval[key]*10**(alpha_y*m_mean[key]))
        
    #     plt.errorbar(X[key]/10**(alpha_c*m_mean[key]),
    #                  rho_dict[key]*10**(alpha_y*m_mean[key]),
    #                  yerr=Err[key]*10**(alpha_y*m_mean[key]), 
    # #                 label='%2.1f-%2.1f'%(m_range[key][0],m_range[key][1]),
    #                  label='%2.1f'%(m_mean[key]),
    #                  fmt='-o', markersize=8
    #                 
        plt.errorbar(X[key]/aval[key],
                     rho_dict[key]*aval[key]*10**(alpha_y*m_mean[key]),
                     yerr=Err[key]*aval[key]*10**(alpha_y*m_mean[key]), 
    #                 label='%2.1f-%2.1f'%(m_range[key][0],m_range[key][1]),
                     label='%2.1f'%(m_mean[key]),
                     fmt='-o', markersize=8
                    )
    
        plt.legend(title='m',fontsize='small',loc='upper right',bbox_to_anchor=(0.4, .65),frameon=False)
    except:
        continue
        
#--- rescaled by t^-p
p = 1

plt.subplot(133)
plt.xscale('log')
plt.yscale('log')
plt.xlabel(r'$t/10^{%3.2f\times m}$'%alpha_c)
plt.ylabel(r'$\lambda(t) \times 10^{(%3.2f-%3.2f)m} \times t^{%3.2f}$'%(alpha_c,alpha_p,p))
#plt.ylabel(r'$\lambda(t) \times t^p$')
#plt.xlim(1e-6,1e0)
#plt.ylim(1e-5,1e-3)
for key, index in zip(KEYS,xrange(100)):
    x_new = X[key]/10**(alpha_c*m_mean[key])
    y_new = rho_dict[key]*10**(alpha_y*m_mean[key])
    y2_new = Err[key]*10**(alpha_y*m_mean[key])
    plt.errorbar(x_new,
                 y_new*x_new**p,
                 yerr=y2_new*x_new**p, 
                 fmt='-o', markersize=8
                )

plt.tight_layout()
##plt.savefig('%s/rho_t_mult.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight')
plt.show()

$\rho(t)\propto 1/((t/t_0)^{p_0}+(t/t_1)^{p_1})$

In [85]:
#--- fitting

from scipy.optimize import curve_fit


def func(x,c,p,A):
    return A/(1+(x/c)**p) 

#def func(x,b,a):
#    return a*x**b

#tc=1e-3

xdata = np.array(xdata)
xxdata = xdata[:]#[xdata>tc]
ydata = np.array(ydata)
yydata = ydata[:] #xdata>tc]
Y_err=y_err
#
ss=zip(xxdata,yydata,Y_err)
ss.sort()
xxdata=[i[0]for i in ss]
yydata=[i[1]for i in ss]
Y_err = [i[2] for i in ss]

popt, pcov = curve_fit(func, xxdata, yydata, 
                       p0=(1.0e-2,1.0,1e2),#,1e0,2.0), 
                       bounds=((1.0e-6,0.1,1e-5), (np.inf,7,np.inf)), #c,p,A,c2,p2
                       sigma=Y_err)
p = -popt[1]
rho_pref = func(np.min(xxdata),*popt)
[tlo,thi]=[popt[0],1e10]

#--- goodness of fit
n=len(xxdata)
m=3 #--- number of coefficients
xsq = np.sum(((yydata-func(xxdata,*popt))/Y_err)**2)
pval=1-chi2.cdf(xsq,n-m)
print 'p=',pval

#--- linear
#popt = np.polyfit(np.log10(xxdata),np.log10(yydata), 1)
#popt[1] = 10**(popt[1])
#p = popt[0]
#rho_pref = popt[1]

#--- plot
fig = plt.figure(figsize=(4,4))
ax=fig.add_subplot(111)
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_xlabel('$t$')
ax.set_ylabel(r'$\lambda(t)$')
#ax.set_xlim(2,6)
#ax.set_ylim(1e-4,1e2)
ax.scatter(xdata,ydata)
ax.scatter(xdata,ydata*xdata**(-p))
ax.scatter([popt[0]],func(np.array([popt[0]]),*popt), #--- size
          marker='x',c='black',s=100)

#--- fit
ax.plot(xxdata,func(xxdata,*popt),
        '-.r',zorder=3,
        label=r'$\lambda(t)\propto t^{%3.2f}$'%(p))
ax.legend(fontsize='xx-small',frameon=False)
#ax.set_title('q=%d%%,df=%s'%(quantile*100,Df),fontsize='xx-small')
plt.savefig('%s/omori_1st.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight',transparent=True)

#--- save p
np.save('omori_exp1st',p)
plt.show()

ValueErrorTraceback (most recent call last)
<ipython-input-85-d15818266c2e> in <module>()
     27                        p0=(1.0e-2,1.0,1e2),#,1e0,2.0),
     28                        bounds=((1.0e-6,0.1,1e-5), (np.inf,7,np.inf)), #c,p,A,c2,p2
---> 29                        sigma=Y_err)
     30 p = -popt[1]
     31 rho_pref = func(np.min(xxdata),*popt)

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/scipy/optimize/minpack.pyc in curve_fit(f, xdata, ydata, p0, sigma, absolute_sigma, check_finite, bounds, method, jac, **kwargs)
    760 
    761         # Do Moore-Penrose inverse discarding zero singular values.
--> 762         _, s, VT = svd(res.jac, full_matrices=False)
    763         threshold = np.finfo(float).eps * max(res.jac.shape) * s[0]
    764         s = s[s > threshold]

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/scipy/linalg/decomp_svd.pyc in svd(a, full_matrices, compute_uv, overwrite_a, check_finite, lapack_driver)
    123     # compute optimal lwork
    124     lwork = _compute_lwork(gesXd_lwork, a1.shape[0], a1.shape[1],
--> 125                            compute_uv=compute_uv, full_matrices=full_matrices)
    126 
    127     # perform decomposition

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/scipy/linalg/lapack.pyc in _compute_lwork(routine, *args, **kwargs)
    591     if info != 0:
    592         raise ValueError("Internal work array size computation failed: "
--> 593                          "%d" % (info,))
    594 
    595     lwork = [w.real for w in wi[:-1]]

ValueError: Internal work array size computation failed: -5

linear fit with t>tc

In [86]:
#--- linear fitting

from scipy.optimize import curve_fit


def func(x,b,a):
    return a*x**b 


xdata = np.array(xdata)
xxdata = xdata[np.all([xdata>tlo,xdata<thi],axis=0)]
#
ydata = np.array(ydata)
yydata = ydata[np.all([xdata>tlo,xdata<thi],axis=0)]
#
y_err = np.array(y_err)
Y_err=y_err[np.all([xdata>tlo,xdata<thi],axis=0)]
#
ss=zip(xxdata,yydata,Y_err)
ss.sort()
xxdata=[i[0]for i in ss]
yydata=[i[1]for i in ss]
Y_err = [i[2] for i in ss]


#--- linear
popt = np.polyfit(np.log10(xxdata),np.log10(yydata), 1,
                 w=np.array(yydata)/np.array(Y_err))
popt[1] = 10**(popt[1])
p = popt[0]
rho_pref = popt[1]

#--- goodness of fit
n=len(xxdata)
m=2 #--- number of coefficients
xsq = np.sum(((yydata-func(xxdata,*popt))/Y_err)**2)
pval=1-chi2.cdf(xsq,n-m)
print 'p=',pval

#--- plot
fig = plt.figure(figsize=(4,4))
ax=fig.add_subplot(111)
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_xlabel('$t$')
ax.set_ylabel(r'$\lambda(t)$')
#ax.set_xlim(2,6)
#ax.set_ylim(1e-4,1e2)
ax.scatter(xdata,ydata)
ax.scatter(xdata,ydata*xdata**(-p))
#ax.scatter([popt[0]],func(np.array([popt[0]]),*popt), #--- size
#          marker='x',c='black',s=100)

#--- fit
ax.plot(xxdata,func(xxdata,*popt),
        '-.r',zorder=3,
        label=r'$\lambda(t)\propto t^{%3.2f}$'%(p))
ax.legend(fontsize='xx-small',frameon=False)
#ax.set_title('q=%d%%,df=%s'%(quantile*100,Df),fontsize='xx-small')
plt.savefig('%s/omori_2nd.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight',transparent=True)

#--- save p
np.save('omori_exp2nd',p)

plt.show()

NameErrorTraceback (most recent call last)
<ipython-input-86-e2e548c7b1a0> in <module>()
      9 
     10 xdata = np.array(xdata)
---> 11 xxdata = xdata[np.all([xdata>tlo,xdata<thi],axis=0)]
     12 #
     13 ydata = np.array(ydata)

NameError: name 'tlo' is not defined
In [87]:
#--- omori (paper version)


#--- rescaled

fig, ax = plt.subplots(figsize=(4,4))
#ax.axis([1/(20/3e-3),1,3e-3,20])
#ax.loglog()

#colors = ['black','red','green','blue','cyan','brown']
#fillstyles=['white',None,'white',None,'white',None,'white']
#markers=['o','s','D','^','<','v']
#assert len(colors) >= len(X)
xdata = []
ydata = []
for key,col,mk,fl in zip(X,colors,markers,fillstyles):
    
    xdata += list(X[key]/10**(alpha_c*m_mean[key]))
    ydata += list(rho_dict[key]*10**(alpha_y*m_mean[key]))
    
    
    
    ax.errorbar(X[key]/10**(alpha_c*m_mean[key]),
                 rho_dict[key]*10**(alpha_y*m_mean[key]),
                 yerr=Err[key]*10**(alpha_y*m_mean[key]), 
#                 label='%2.1f-%2.1f'%(m_range[key][0],m_range[key][1]),
                 fmt='-o', markersize=8,color=col,marker=mk,markerfacecolor=fl,markeredgewidth=0.7,
                linewidth=.5,
                 barsabove=None,capsize=5,capthick=1,elinewidth=1
                )
        
(ylo,yhi)=(10**np.floor(np.log10(np.min(ydata))),\
           10**np.ceil(np.log10(np.max(ydata))))
xhi=10**np.ceil(np.log10(np.max(xdata)))
ax.axis([xhi/(yhi/ylo),xhi,ylo,yhi])
ax.loglog()

##--- add major xticks
xmin=np.ceil(np.log10(ax.axis()[0]))
xmax=np.floor(np.log10(ax.axis()[1]))
nbin = xmax - xmin
ax.set_xticks(np.logspace(xmin,xmax,int(nbin)+1))

#--- add major yticks
ymin=np.ceil(np.log10(ax.axis()[2]))
ymax=np.floor(np.log10(ax.axis()[3]))
nbin = ymax - ymin
ax.set_yticks(np.logspace(ymin,ymax,int(nbin)+1))

#--- put minor bins
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.xaxis.set_minor_locator(locmin)
ax.xaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())

#--- put minor bins y
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.yaxis.set_minor_locator(locmin)
ax.yaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())



#fig.legend(title='m',prop=fontP,loc='upper right',frameon=False)#, bbox_to_anchor=(0.4, .6))
ax.tick_params(axis='y',left=True, right=True,which='both')
ax.tick_params(axis='x',bottom=True, top=True,which='both')

ax.set_xlabel('$t(day)$')
ax.set_ylabel('$\lambda(t)/10^{%3.2fm}$'%alpha_p)




#--- plot power-law
(xlo,xhi)=(10**xmin,10**(xmin+6)) #(0.8*15e-3,0.8*.6)
yhi=func(xlo,*popt)*1e-2 #rho_pref #*xmin**p/10
ax.plot([xlo,xhi],[yhi, yhi* (xhi/xlo)**p],'r-.',
        label=r'$\lambda(t)\propto t^{%3.2f}$'%(p),
        linewidth=1)

ax.legend(fontsize='x-small',frameon=False)
ax.set_xlabel(r'$t$')
#ax.set_ylabel(r'$\rho(r)\times r_{rup}$')

ax.set_title('q=%d,df=%s'%(quantile*100,Df),fontsize='xx-small')

DrawFrame(ax, (0.22,0.06),(0.14,0.06),0.01,LOG_X=True,LOG_Y=True)

#plt.tight_layout()
plt.savefig('%s/rho_t2nd.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight')

plt.show()

ValueErrorTraceback (most recent call last)
<ipython-input-87-723c5df49acd> in <module>()
     30                 )
     31 
---> 32 (ylo,yhi)=(10**np.floor(np.log10(np.min(ydata))),           10**np.ceil(np.log10(np.max(ydata))))
     33 xhi=10**np.ceil(np.log10(np.max(xdata)))
     34 ax.axis([xhi/(yhi/ylo),xhi,ylo,yhi])

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/core/fromnumeric.pyc in amin(a, axis, out, keepdims)
   2418 
   2419     return _methods._amin(a, axis=axis,
-> 2420                           out=out, **kwargs)
   2421 
   2422 

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/core/_methods.pyc in _amin(a, axis, out, keepdims)
     27 
     28 def _amin(a, axis=None, out=None, keepdims=False):
---> 29     return umr_minimum(a, axis, None, out, keepdims)
     30 
     31 def _sum(a, axis=None, dtype=None, out=None, keepdims=False):

ValueError: zero-size array to reduction operation minimum which has no identity
In [88]:
#--- omori (paper version)

import re


# create a definition for the short hyphen
matplotlib.rcParams["text.latex.preamble"].append(r'\mathchardef\mhyphen="2D')

class MyLogFormatter(matplotlib.ticker.LogFormatterMathtext):
    def __call__(self, x, pos=None):
        # call the original LogFormatter
        rv = matplotlib.ticker.LogFormatterMathtext.__call__(self, x, pos)

        # check if we really use TeX
        if matplotlib.rcParams["text.usetex"]:
            # if we have the string ^{- there is a negative exponent
            # where the minus sign is replaced by the short hyphen
            rv = re.sub(r'\^\{-', r'^{\mhyphen', rv)

        return rv
    
#--- rescaled

fig, ax = plt.subplots(figsize=(4,4))
#ax.axis([1/(20/3e-3),1,3e-3,20])
#ax.loglog()

#colors = ['black','red','green','blue','cyan','brown']
#fillstyles=['white',None,'white',None,'white',None,'white']
#markers=['o','s','D','^','<','v']
#assert len(colors) >= len(X)
for key,col,mk,fl in zip(X,colors,markers,fillstyles):
    
    ax.errorbar(X[key]/10**(alpha_c*m_mean[key]),
                 rho_dict[key]*10**(alpha_y*m_mean[key]),
                 yerr=Err[key]*10**(alpha_y*m_mean[key]), 
#                 label='%2.1f-%2.1f'%(m_range[key][0],m_range[key][1]),
                 fmt='-o', markersize=8,color=col,marker=mk,markerfacecolor=fl,markeredgewidth=0.7,
                linewidth=.5,
                 barsabove=None,capsize=5,capthick=1,elinewidth=1
                )
        
(ylo,yhi)=(10**np.floor(np.log10(np.min(ydata))),\
#           10**np.ceil(np.log10(np.max(ydata))))
           np.max(ydata))
#(xlo,xhi)=(10**np.floor(np.log10(np.min(xdata))),\
#           10**np.ceil(np.log10(np.max(xdata))))
xc=(np.min(xdata)*np.max(xdata))**.5
xlo = xc/(yhi/ylo)**.5
xhi = xc*(yhi/ylo)**.5

ax.axis([xlo,xhi,ylo,yhi])
ax.loglog()

##--- add major xticks
xmin=np.ceil(np.log10(ax.axis()[0]))
xmax=np.floor(np.log10(ax.axis()[1]))
nbin = xmax - xmin
ax.set_xticks(np.logspace(xmin,xmax,int(nbin)+1))

#--- add major yticks
ymin=np.ceil(np.log10(ax.axis()[2]))
ymax=np.floor(np.log10(ax.axis()[3]))
nbin = ymax - ymin
ax.set_yticks(np.logspace(ymin,ymax,int(nbin)+1))

#--- put minor bins
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.xaxis.set_minor_locator(locmin)
ax.xaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())
ax.xaxis.set_major_formatter(MyLogFormatter()) #--- minus sign too long

#--- put minor bins y
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.yaxis.set_minor_locator(locmin)
ax.yaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())
ax.xaxis.set_major_formatter(MyLogFormatter()) #--- minus sign too long



#fig.legend(title='m',prop=fontP,loc='upper right',frameon=False)#, bbox_to_anchor=(0.4, .6))
ax.tick_params(axis='y',left=True, right=True,which='both')
ax.tick_params(axis='x',bottom=True, top=True,which='both')

#ax.set_xlabel('$t(day)$')
#ax.set_ylabel('$\lambda(t)/10^{%3.2fm}$'%alpha_p)




#--- plot power-law
tc=tlo
(xlo,xhi)=(tc,1e6*tc) #(0.8*15e-3,0.8*.6)
yhi=rho_pref*tc**p*10
ax.plot([xlo,xhi],[yhi, yhi* (xhi/xlo)**p],'r-.',
#        label=r'$\lambda(t)\propto t^{%3.2f}$'%(p),
        linewidth=1)

#ax.legend(fontsize='x-small',frameon=False)
#ax.set_xlabel(r'$t$')
#ax.set_ylabel(r'$\rho(r)\times r_{rup}$')

#ax.set_title('q=%d%%,df=%s'%(quantile*100,Df),fontsize='xx-small')

DrawFrame(ax, (0.22,0.06),(0.14,0.06),0.1,LOG_X=True,LOG_Y=True)

#plt.tight_layout()
#plt.savefig('%s/rho_t2nd.png'%DIR_OUTPT_figs,dpi=300,bbox_inches='tight')

plt.show()

ValueErrorTraceback (most recent call last)
<ipython-input-88-4ec4c652c835> in <module>()
     41                 )
     42 
---> 43 (ylo,yhi)=(10**np.floor(np.log10(np.min(ydata))),#           10**np.ceil(np.log10(np.max(ydata))))
     44            np.max(ydata))
     45 #(xlo,xhi)=(10**np.floor(np.log10(np.min(xdata))),\

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/core/fromnumeric.pyc in amin(a, axis, out, keepdims)
   2418 
   2419     return _methods._amin(a, axis=axis,
-> 2420                           out=out, **kwargs)
   2421 
   2422 

/global/software/anaconda/anaconda-2.7-5.1.0/lib/python2.7/site-packages/numpy/core/_methods.pyc in _amin(a, axis, out, keepdims)
     27 
     28 def _amin(a, axis=None, out=None, keepdims=False):
---> 29     return umr_minimum(a, axis, None, out, keepdims)
     30 
     31 def _sum(a, axis=None, dtype=None, out=None, keepdims=False):

ValueError: zero-size array to reduction operation minimum which has no identity
In [89]:
#--- density plots: rho(r) (paper version 2nd)

#plt.style.use('seaborn')
#plt.rcParams.update({'lines.markeredgewidth': 1})

def returnCMtoInch(x,y):
    return (x/2.54,y/2.54)

fig, ax = plt.subplots(figsize=(4,4))
#---
ax.spines['right'].set_visible(False) #--- half open
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
#---
#ax.axis([1e-3,1e2,1e-4,1e1])
ax.loglog()
#ax.set_xticks(np.logspace(-1,3,4))




#plt.xlabel(r'$R=r/10^{%s m}$'%alpha)
#plt.ylabel(r'$P(R)=P(r)\times 10^{%s m}$'%alpha)
#plt.xlim(1e-5,1e0)
#plt.ylim(1e-5,1e0)

#colors = ['black','red','green','blue','cyan','brown']
#fillstyles=['white',None,'white',None,'white',None,'white']
#markers=['o','s','D','^','<','v']
#assert len(colors) >= len(X)
xmin = ymin = sys.maxint
ymax=xmax=-sys.maxint
for key,col,mk,fl in zip(X,colors,markers,fillstyles):
    if np.max(X[key]) > xmax:
        xmax = np.max(X[key])
    if np.min(X[key]) < xmin:
        xmin = np.min(X[key])
    if np.min(rho_dict[key]) < ymin:
        ymin = np.min(rho_dict[key])
    if np.max(rho_dict[key]) > ymax:
        ymax = np.max(rho_dict[key])
    ax.errorbar(X[key],
                 rho_dict[key],
                 yerr=Err[key],
                 label='%2.1f-%2.1f'%(m_range[key][0],m_range[key][1]),
                 fmt='-o', markersize=8,color=col,marker=mk,markerfacecolor=fl,markeredgewidth=0.7,
                linewidth=.5,
                 barsabove=None,capsize=5,capthick=1,elinewidth=1
                )

(ylo,yhi)=(10**np.floor(np.log10(ymin)),\
#           10**np.ceil(np.log10(ymax)))
           ymax)
xc=(xmin*xmax)**.5
xlo = xc/(yhi/ylo)**.5
xhi = xc*(yhi/ylo)**.5

ax.axis([xlo,xhi,ylo,yhi])

#--- add major xticks
xmin=np.ceil(np.log10(ax.axis()[0]))
xmax=np.floor(np.log10(ax.axis()[1]))
nbin = xmax - xmin
ax.set_xticks(np.logspace(xmin,xmax,int(nbin)+1))

#--- add major yticks
ymin=np.ceil(np.log10(ax.axis()[2]))
ymax=np.floor(np.log10(ax.axis()[3]))
nbin = ymax - ymin
ax.set_yticks(np.logspace(ymin,ymax,int(nbin)+1))

#--- put minor bins
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.xaxis.set_minor_locator(locmin)
ax.xaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())
ax.xaxis.set_major_formatter(MyLogFormatter()) #--- minus sign too long

#--- put minor bins y
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.yaxis.set_minor_locator(locmin)
ax.yaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())
ax.yaxis.set_major_formatter(MyLogFormatter())


    
#fig.legend(title='m',prop=fontP,loc='lower left',frameon=False)#, bbox_to_anchor=(0.4, .6))
#ax.tick_params(axis='y',left=True, right=True,which='both')
#ax.tick_params(axis='x',bottom=True, top=True,which='both')
DrawFrame(ax, (0.22,0.06),(0.14,0.06),0.01,LOG_X=True,LOG_Y=True)


#plt.tight_layout()
#plt.savefig('%s/rho_t.png'%DIR_OUTPT_figs,dpi=300,bbox_inches='tight',transparent=True)
plt.show()

ValueErrorTraceback (most recent call last)
<ipython-input-89-c007b939bf40> in <module>()
     52 (ylo,yhi)=(10**np.floor(np.log10(ymin)),#           10**np.ceil(np.log10(ymax)))
     53            ymax)
---> 54 xc=(xmin*xmax)**.5
     55 xlo = xc/(yhi/ylo)**.5
     56 xhi = xc*(yhi/ylo)**.5

ValueError: negative number cannot be raised to a fractional power

$\lambda(t)$ vs. $t$

In [90]:
#--- density plots: rho(r) (paper version 2nd)


def returnCMtoInch(x,y):
    return (x/2.54,y/2.54)

fig, ax = plt.subplots(figsize=(4,4))
#---
ax.spines['right'].set_visible(False) #--- half open
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
#---
ax.axis([10/(6e3/2e-3),10,2e-3,6e3])
ax.loglog()

#--- add major xticks
xmin=np.ceil(np.log10(ax.axis()[0]))
xmax=np.floor(np.log10(ax.axis()[1]))
nbin = xmax - xmin
ax.set_xticks(np.logspace(xmin,xmax,int(nbin)+1))

#--- add major yticks
ymin=np.ceil(np.log10(ax.axis()[2]))
ymax=np.floor(np.log10(ax.axis()[3]))
nbin = ymax - ymin
ax.set_yticks(np.logspace(ymin,ymax,int(nbin)+1))

#--- put minor bins
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.xaxis.set_minor_locator(locmin)
ax.xaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())

#--- put minor bins y
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.yaxis.set_minor_locator(locmin)
ax.yaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())

ax.set_xlabel(r'$t(day)$')
ax.set_ylabel(r'$\lambda(t)$')
#plt.xlim(1e-5,1e0)
#plt.ylim(1e-5,1e0)

#colors = ['black','red','green','blue','cyan']
#fillstyles=['white',None,'white',None,'white',None]
#markers=['o','s','D','^','<']
for key,col,mk,fl in zip(X,colors,markers,fillstyles):
    xlabel = r'$%2.1f-%2.1f$'%(m_range[key][0],m_range[key][1])
    if key == 5:
        xlabel = r'$6.4$'
    if key == 7:
        xlabel = r'$7.1$'
    ax.errorbar(X[key],
                 rho_dict[key],
                 yerr=Err[key],
                 label=xlabel,
                 fmt='-o', markersize=8,color=col,marker=mk,markerfacecolor=fl,markeredgewidth=0.7,
                linewidth=.5,
                 barsabove=None,capsize=5,capthick=1,elinewidth=1
                )

#    ax.legend(title=r'$m$',prop=fontP,loc='lower left',frameon=False, 
#              bbox_to_anchor=(1, 0))
#ax.tick_params(axis='y',left=True, right=True,which='both')
#ax.tick_params(axis='x',bottom=True, top=True,which='both')
DrawFrame(ax, (0.22,0.06),(0.14,0.06),0.1,LOG_X=True,LOG_Y=True)


#plt.tight_layout()
#plt.savefig('%s/rho_t.png'%DIR_OUTPT_figs,dpi=75,bbox_inches='tight',transparent=True)

rescaled $\lambda(t)$ vs. $t$

In [91]:
#--- density plots: rho(r) (paper version 2nd)


def returnCMtoInch(x,y):
    return (x/2.54,y/2.54)

fig, ax = plt.subplots(figsize=(4,4))
#---
ax.spines['right'].set_visible(False) #--- half open
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
#---
ax.axis([10/(6e3/2e-3),10,2e-3,6e3])
ax.loglog()

#--- add major xticks
xmin=np.ceil(np.log10(ax.axis()[0]))
xmax=np.floor(np.log10(ax.axis()[1]))
nbin = xmax - xmin
ax.set_xticks(np.logspace(xmin,xmax,int(nbin)+1))

#--- add major yticks
ymin=np.ceil(np.log10(ax.axis()[2]))
ymax=np.floor(np.log10(ax.axis()[3]))
nbin = ymax - ymin
ax.set_yticks(np.logspace(ymin,ymax,int(nbin)+1))

#--- put minor bins
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.xaxis.set_minor_locator(locmin)
ax.xaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())

#--- put minor bins y
locmin = matplotlib.ticker.LogLocator(base=10.0,subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1),numticks=12)
ax.yaxis.set_minor_locator(locmin)
ax.yaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())

#--- rescaled
alpha_c = 0.0 #0.5
alpha_p = 0.6 #--- productivity
alpha_y = alpha_c - alpha_p
p = 1

plt.xlabel(r'$t(day)$')
plt.ylabel(r'$\lambda(t) \times t^{%s}$'%(p))

#plt.xlim(1e-5,1e0)
#plt.ylim(1e-5,1e0)

#colors = ['black','red','green','blue','cyan']
#fillstyles=['white',None,'white',None,'white',None]
#markers=['o','s','D','^','<']
for key,col,mk,fl in zip(X,colors,markers,fillstyles):
    xlabel = r'$%2.1f-%2.1f$'%(m_range[key][0],m_range[key][1])
    if key == 5:
        xlabel = r'$6.4$'
    if key == 7:
        xlabel = r'$7.1$'
    ax.errorbar(X[key],
                 rho_dict[key]*X[key]**p,
                 yerr=Err[key]*X[key]**p, 
                 label=xlabel,
                 fmt='-o', markersize=8,color=col,marker=mk,markerfacecolor=fl,markeredgewidth=0.7,
                linewidth=.5,
                 barsabove=None,capsize=5,capthick=1,elinewidth=1
                )

    ax.legend(title=r'$m$',prop=fontP,loc='lower left',frameon=False, 
              bbox_to_anchor=(1, 0))
#ax.tick_params(axis='y',left=True, right=True,which='both')
#ax.tick_params(axis='x',bottom=True, top=True,which='both')
DrawFrame(ax, (0.22,0.06),(0.14,0.06),0.1,LOG_X=True,LOG_Y=True)


#plt.tight_layout()
#plt.savefig('%s/rho_t_rescaled.png'%DIR_OUTPT_figs,dpi=300,bbox_inches='tight',transparent=True)
In [92]:
#--- construct trees

from math import *

class tree():
    leafDepth = 0
    n_leaves = 0
    g_netx = None
    leaf_generation = {} #--- key: leaf id;  val: generation
    nsize = 1 #--- cluster size
    rtm_dic = {}
    #---
    def __init__( self, data, coord, coord_parent, n_generation ):
        self.data = data #--- id
        self.coord = coord
        self.coord_parent = coord_parent
        self.n_daughters = 0
        self.daughter_obj = {}
        self.n_generation = n_generation
    #---
    def insert( self, data, parent_id, coord, coord_parent, n_generation ):
        if parent_id == self.data: #--- parent exists
            self.daughter_obj[ self.n_daughters ] = tree( data, coord, coord_parent, n_generation )
            self.n_daughters += 1
            return
        else:
            for keys in self.daughter_obj:
                self.daughter_obj[ keys ].insert( data, parent_id, coord, coord_parent, n_generation )	
    #---
    def sprint( self, df ):
        for keys in self.daughter_obj:
            tree.g_netx.add_node(self.daughter_obj[ keys ].data)
            tree.g_netx.add_edge( self.data, self.daughter_obj[ keys ].data)
            self.daughter_obj[ keys ].sprint( df )
    #---
    def getLeaves( self ):
        if len( self.daughter_obj ) == 0: #--- it's a leaf
            tree.leaf_generation[ self.data ] = self.n_generation
        else:
            for keys in self.daughter_obj: #--- search in daughters
                self.daughter_obj[ keys ].getLeaves()
    #---
    def getSize( self ):
        for keys in self.daughter_obj: #--- search in daughters
            tree.nsize += 1
            self.daughter_obj[ keys ].getSize()
    
    def get( self, title ):
        pass


    def get_coords( self ): #--- rtm_dic['node_id'] = [r,t,m]
        for keys in self.daughter_obj:
            child_object = self.daughter_obj[keys]
            cords = child_object.coord
            id_node = child_object.data
            tree.rtm_dic[ id_node ] = cords
            child_object.get_coords()
            
    def plot(self,ax):
#        print 'id=%s'%(self.data), 'generation=%s'%(self.n_generation)
        for keys in self.daughter_obj:
            child_object = self.daughter_obj[keys]
            cords = child_object.coord
            tree.pairs.append(child_object.data) #[self.coord, child_object.coord])
#            ax.annotate("", tuple(cords[1][:2]), xytext=tuple(self.coord[1][:2]),
#                         textcoords='data', annotation_clip=False,
#                        arrowprops=dict(arrowstyle="-|>,head_width=.1,head_length=.4",color="b",linewidth="0.3"))  
            child_object.plot(ax)

    def nodeExist(self, event_id):
        if self.data == event_id:
#            print 'yes'
            return True
        
        for keys in self.daughter_obj:
            child_object = self.daughter_obj[keys]
            if child_object.nodeExist( event_id ):
                return True
        return False

pref = 1.0 / (24.0*60*60)
#--------------------
#----- subset (the same as trig. analysis)
#--------------------
swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
                             'magnitude', 
                             ( mc, sys.maxint ) ) 
swarm_lohi = GetCartesian( swarm_lohi )
swarm_lohi.reset_index(inplace=True,drop=True)

NoTriggerList = set(nij_trig['Parent_id'])-set(nij_trig['Event_id']) #--- list of roots
    
#---
#--- include parent and entire generation
#---
CLUSTER={}
clusterID = 0
stree = {}
magScale = 1.0
for rootID in NoTriggerList: #--- roots: events with no parents
    tree_level = 0
    mainShockList = [ rootID ]
    CLUSTER[clusterID]=[mainShockList[0]]
    t_root = swarm_lohi.loc[ rootID ]['date']
    r_root = swarm_lohi.loc[ rootID ][['x(km)','y(km)','z(km)']].tolist()
    m_root = swarm_lohi.loc[ rootID ]['magnitude'] 
    stree[ clusterID ] = tree( rootID, [ t_root, r_root, m_root ],
                                           [ t_root, r_root, m_root ], 
                                           tree_level )
    while len(mainShockList): #--- from root down to leaves
        newMainShockList = []
        for mainShockID, counter in zip( mainShockList, xrange( sys.maxint ) ):
            try:
                newMainShockList += d_trig[ mainShockID ] #--- aftershocks become new
            except:
#                traceback.print_exc()
                continue
            for aftershockID in d_trig[ mainShockID ]:
                CLUSTER[ clusterID ].append(aftershockID)
#                dt = (swarm_lohi.loc[ aftershockID ]['date']-swarm_lohi.loc[ rootID ]['date']).total_seconds() * pref 
                t_afs = swarm_lohi.loc[ aftershockID ]['date']
#                -swarm_lohi.loc[ rootID ]['date']).total_seconds() * pref 
                r_afs = swarm_lohi.loc[aftershockID][['x(km)','y(km)','z(km)']].tolist()
                m_afs = swarm_lohi.loc[aftershockID]['magnitude']
                t_parent = swarm_lohi.loc[ mainShockID ]['date']
                r_parent = swarm_lohi.loc[mainShockID][['x(km)','y(km)','z(km)']].tolist()
                m_parent = swarm_lohi.loc[mainShockID]['magnitude']
                stree[ clusterID ].insert( aftershockID, mainShockID, 
                                          [t_afs, r_afs, m_afs],
                                          [t_parent, r_parent, m_parent],
                                          tree_level + 1 )
        mainShockList = [ i for i in newMainShockList ]
        tree_level += 1
    assert len(mainShockList) == 0 #--- leaf
    clusterID+=1
    
sl=[]
for keys in stree:
    sl.append([stree[keys].n_daughters,stree[keys].data,keys])
sl.sort(reverse=True)
sl[:3]
Out[92]:
[[249, 962, 183], [12, 325, 11], [11, 1511, 146]]
In [93]:
x=(-97,-96.5)
y=(35.8,36)
swarm_lohi.sort_values(by='magnitude',ascending=False).head() #[(swarm_lohi['longitude']>x[0]) & (swarm_lohi['longitude']<x[1]) & (swarm_lohi['latitude']>y[0]) & (swarm_lohi['latitude']<y[1])].head(n=10)
Out[93]:
Event ID date magnitude latitude longitude depth Azimuth(T) Plunge(T) Azimuth(P) Plunge(P) x(km) y(km) z(km)
408 38457511 2019-07-06 03:19:53.040 7.10 35.770 -117.599 8.0 293.0 6.0 25.0 15.0 16.809243 28.403809 8.0
3 38443183 2019-07-04 17:33:49.000 6.40 35.705 -117.504 10.5 94.0 2.0 4.0 5.0 27.384595 21.191790 10.5
423 38457687 2019-07-06 03:47:53.420 5.50 35.901 -117.750 5.0 103.0 7.0 194.0 13.0 0.000000 42.939039 5.0
442 38457847 2019-07-06 04:18:55.790 5.44 35.910 -117.685 7.4 299.0 2.0 209.0 15.0 7.235767 43.937654 7.4
263 38450263 2019-07-05 11:07:53.040 5.36 35.760 -117.575 7.0 92.0 11.0 2.0 0.0 19.480911 27.294262 7.0
In [94]:
#--- a large event belongs to what cluster

event_id = 5 #1509
FOUND = False
for item in sl:
    cls_id = item[2]
    root_id = item[1]    
    if stree[cls_id].nodeExist(event_id):
        FOUND = True
        break
if FOUND:
    print 'root_id=%s'%root_id
else:
    print 'it is a background event'
root_id=5
In [95]:
#--- leaf vs. depth

import networkx as nx
#import treeDataStruct as tds
from itertools import count
import random as rnd

exp = 4
for item, index in zip(sl[:6],xrange(sys.maxint)):
    sid = item[2]
    root_id = item[1]
 #   print 'root_id=',root_id
    tree.g_netx = None
    tree.g_netx = nx.DiGraph(directed=True)
    tree.g_netx.add_node(stree[ sid ].data,color='b')
    stree[ sid ].sprint(swarm_lohi )
    
    # read edgelist from grid.edgelist
    nx.write_edgelist(tree.g_netx, path="grid.edgelist", delimiter=":")
    H = nx.read_edgelist(path="grid.edgelist", delimiter=":") #,create_using=nx.DiGraph())
#    nx.draw(H,with_labels=True,arrows = True)#pos=nx.spring_layout(H))
#    nx.draw_networkx(tree.g_netx, arrows=True, with_labels=True,node_color='b') #, **kwds)

    pos = nx.spring_layout(H)
    nodes=H.nodes

    colors = ['red' for n in nodes]
    sizes=[exp**swarm_lohi.loc[ int(n) ]['magnitude'] for n in nodes]

    for n,indx in zip(nodes,xrange(sys.maxint)):
        if int(n) == root_id:
            colors[indx] = 'gray'
    ec = nx.draw_networkx_edges(H, pos, alpha=0.2,arrows = True)
    nc = nx.draw_networkx_nodes(H, pos,node_size=sizes, node_color=colors,cmap=plt.cm.seismic) #, nodelist=nodes, , 
 #                           with_labels=False, node_size=100, cmap=plt.cm.jet)

    plt.axis('off')
#    plt.savefig('%s/topology.%s.png'%(DIR_OUTPT_figs,index),dpi=75)#,bbox_inches='tight')
    plt.show()

size vs. depth

In [96]:
mpl.rcParams.update(mpl.rcParamsDefault)
warnings.filterwarnings('ignore') #--- get rid of warnings

rc('text', usetex=True)
font = {'size'   : 26}
matplotlib.rc('font', **font)

fig = plt.figure(figsize=(4,4))
ax = fig.add_subplot(111)
ax.set_xscale('linear')
ax.set_yscale('log')
#ax.set_xlabel('mean depth')
#ax.set_ylabel('size')
ax.axis([0,7.0,1,1e2])
ax.set_yscale('log')
#--- add major xticks
xmin=np.ceil(ax.axis()[0])
xmax=np.floor(ax.axis()[1])
nbin = xmax - xmin
ax.set_xticks(np.linspace(ax.axis()[0],ax.axis()[1],int(nbin)+1))

#--- add major yticks
ymin=np.ceil(np.log10(ax.axis()[2]))
ymax=np.floor(np.log10(ax.axis()[3]))
nbin = ymax - ymin
ax.set_yticks(np.logspace(ymin,ymax,int(nbin)+1))

MD=[]
S=[]
M=[]
root=[]
for item in sl:
    sid = item[2]
    root_id = item[1]
    root.append(root_id)

    tree.leaf_generation={} #--- initialize
#    stree[ sid ].initialize()
    stree[ sid ].getLeaves() #--- call function
    d = stree[ sid ].leaf_generation
#    print 'd[%s]=%s'%(sid,d)
    mean_depth = np.mean( d.values() ) #np.mean( d.values() ) #1.0 * tds.tree.leafDepth / tds.tree.n_leaves
    #--- size
    tree.nsize = 1 #--- initialize
    stree[ sid ].getSize() #--- call function
    MD.append(mean_depth)
    S.append(stree[ sid ].nsize)
    M.append(swarm_lohi.loc[stree[ sid ].data]['magnitude'])

exp = 5*.7
S=np.array(S)
MD = np.array(MD)
M=np.array(M)
root=np.array(root)
scatter=ax.scatter(MD, S,
            marker='o',color='gray',edgecolors='none',alpha=0.5,
            s=exp**np.array(M))

#--- color specific clusters
cond = np.all([S>=5,MD>0.0],axis=0)
scatter=ax.scatter(MD[cond], S[cond],
            marker='o',color='red',edgecolors='none',#alpha=0.5,
            s=exp**np.array(M[cond]))
#print root[10<=S], S[10<=S] 
print 'roots=',root[cond]
print 'size',S[cond]
print 'depth',MD[cond]

# #--- legend
# M=np.array(M)
# m_min=M.min()
# m_max=M.max()
# m_mean=(m_min+m_max)/2 #M.mean()
# l1 = plt.scatter([-2],[-2], s=exp**m_min, c='gray',edgecolors='none')
# l2 = plt.scatter([-3],[-3], s=exp**m_mean,c='gray', edgecolors='none')
# l3 = plt.scatter([-4],[-4], s=exp**m_max,c='gray', edgecolors='none')
# labels = ["%2.1f"%m_min, "%2.1f"%m_mean, "%2.1f"%m_max]
# leg = plt.legend([l1, l2, l3], labels, ncol=3,fontsize=16, frameon=False,
#                 handlelength=0, borderpad = 0, labelspacing=0,columnspacing=2,prop=None,
#                 handletextpad=1, scatterpoints = 1,loc='upper right', bbox_to_anchor=(.92, 0.95))
#--- frame
DrawFrame(ax, (0.24,0.08),(0.14,0.06),0.1,LOG_Y=True)

#--- save
#plt.savefig('%s/topology.png'%DIR_OUTPT_figs,dpi=300,bbox_inches='tight')

mpl.rcParams.update(mpl.rcParamsDefault)
warnings.filterwarnings('ignore') #--- get rid of warnings
rc('text', usetex=True)
font = {'size'   : 26}
matplotlib.rc('font', **font)
roots= [ 962  325 1511  925  271  462  439  614    5 2197 1815  508   58 1301
  810  661  572  196   80  792  476   65  892  520  440  102   76   35
   28]
size [278  21  13  52  18  12  14   8  19   6   9   6   6   6   5   7   5   7
  10   5   5   5   6  43   8   5  10  10   8]
depth [1.11068702 1.53333333 1.09090909 1.82       1.5        1.2
 1.41666667 1.16666667 2.16666667 1.25       2.         1.25
 1.25       1.5        1.33333333 2.         1.33333333 1.75
 2.         1.66666667 2.         1.66666667 2.33333333 3.14814815
 2.2        2.5        2.33333333 4.         2.5       ]

trig. cascades

In [97]:
#--- leaf vs. depth

import networkx as nx
#import treeDataStruct as tds
from itertools import count
import random as rnd



#dd={'ROOT':407,
#    'exponent':4.0*2.7}
#dd={'ROOT':325,
#      'exponent':4.0*2.7}
dd={'ROOT':1,#1505,
     'exponent':4.0*.8}

#for ii in xrange(1):
ROOT = dd['ROOT']
exponent=dd['exponent']

for item in sl:
    cls_id = item[2]
    root_id = item[1]
    if root_id != ROOT:
        continue
    tree.pairs = []
    stree[cls_id].plot(ax)
    tree.pairs = [root_id]+tree.pairs
    
for item in sl:
    sid = item[2]
    root_id = item[1]
    if root_id != ROOT:
        continue
    print 'root_id=',root_id, 'n=%s'%(item[0])
    tree.g_netx = None
    tree.g_netx = nx.DiGraph(directed=True)
    tree.g_netx.add_node(stree[ sid ].data,color='b')
    stree[ sid ].sprint(swarm_lohi )
    
    # read edgelist from grid.edgelist
    nx.write_edgelist(tree.g_netx, path="grid.edgelist", delimiter=":")
    H = nx.read_edgelist(path="grid.edgelist", delimiter=":") #,create_using=nx.DiGraph())
#    nx.draw(H,with_labels=True,arrows = True)#pos=nx.spring_layout(H))
#    nx.draw_networkx(tree.g_netx, arrows=True, with_labels=True,node_color='b') #, **kwds)

    pos = nx.spring_layout(H)
    nodes=H.nodes

    colors = ['red' for n in nodes]
    
    sizes=[exponent**swarm_lohi.loc[ int(n) ]['magnitude'] for n in nodes]
    node_shape = ['o' for n in nodes]
    for n,indx in zip(nodes,xrange(sys.maxint)):
        if int(n) == root_id:
            colors[indx] = 'yellow'
#            node_shape[indx]='*'

    fig=plt.figure(figsize=(8,8))
    ax=fig.add_subplot(111)
    ec = nx.draw_networkx_edges(H, pos, alpha=0.2,arrows = True, ax= ax,edge_color='b',width=3)
    nc = nx.draw_networkx_nodes(H, pos,node_size=sizes, node_color=colors,cmap=plt.cm.seismic, ax=ax,
                               edgecolors='black',node_shape='o') #, nodelist=nodes, , 
 #                           with_labels=False, node_size=100, cmap=plt.cm.jet)

    plt.axis('on')
    ax.tick_params(labelbottom=False,labelleft=False,length=0.0)
    fig.show()
#    DrawFrame(ax, (0.0,0.0),(0.0,0.0),.1)
#    DrawFrame(ax, (0.16,0.16),(0.07,0.03),0.1)
    DrawFrame(ax, (0.16,0.1),(0.09,0.03),0.1)


#plt.tight_layout()
#    fig.savefig('%s/tree.png'%DIR_OUTPT_figs,dpi=300,bbox_inches='tight',transparent=True)
    
    

timeseries associated with the trig. cascade

In [98]:
#--- plot clusters


def Inside(t,(tmin,tmax)):
    if tmin<= t<tmax:
        return True
    return False    

def GetTrigList( nij_trig ):
    d_trig = {}
    for items in nij_trig.itertuples():
        triggerID = items.Parent_id
        d_trig.setdefault( triggerID, [] ).append( items.Event_id )
    return d_trig

def PlotArrowsTime(df_complete,d_trig, (tmin,tmax), (xmin,xmax), (ymin,ymax) ):
    tlist=[]
    mlist=[]
    xm = []
    ym = []
    for triggerID in d_trig: #--- mother events
        
        t0 = df_complete['date'].iloc[ triggerID ] #--- time of mother event
        m0 = df_complete['magnitude'].iloc[triggerID] #--- magnitude
        
        if not ( Inside(t0,(tmin,tmax) ) and #--- within the given interval
                 Inside(swarm_lohi['x(km)'].iloc[triggerID],(xmin,xmax)) and 
                 Inside(swarm_lohi['y(km)'].iloc[triggerID],(ymin,ymax)) ):
            continue

#        x0 = md.date2num( t0 ) #--- convert to time object
#        y0 = m0 #--- magnitude
        
#        tlist.append( t0 )
#        mlist.append( m0 )

        for daughter_id in d_trig[triggerID]: #--- children
            
            t1 = df_complete['date'].iloc[ daughter_id ] #--- time
            m1 = df_complete['magnitude'].iloc[ daughter_id ] #--- magnitude
            
            
            if not ( Inside(t1,(tmin,tmax) ) and #--- within the given interval
                     Inside(swarm_lohi['x(km)'].iloc[daughter_id],(xmin,xmax)) and 
                     Inside(swarm_lohi['y(km)'].iloc[daughter_id],(ymin,ymax)) ):
                continue
                
#            x1 = md.date2num( t1 ) #--- convert to time object
#            y1 = m1 #--- magnitude

#            xw = x1 - x0 #--- t_child-t_parent
#            yw = y1 - y0 #--- m_child - m_parent
            if daughter_id in tree.pairs:
                tlist.append( t1 )
                mlist.append( m1 )
                ax.annotate("", (t1,m1), xytext=(t0, m0), zorder=1,
                        textcoords='data',clip_on=True,
                        arrowprops=dict(arrowstyle="-|>,head_width=%s,head_length=%s"%(head_width,head_length),
                                        shrinkA=shrinkA,
                                        color="b",linewidth="0.3")) 
    
        #--- plot mother
        if triggerID == tree.pairs[0]: #in roott:
            xm.append(t0)
            ym.append(m0)
            
            #--- plot circles
    df=pd.DataFrame({'date':tlist,'mag':mlist})
    ax.scatter(df['date'], df['mag'],
                s=exponent**(df['mag']),
                alpha=1,zorder=2,
                facecolors='red',color='black')
    #plt.savefig('timeSeries.png')
    for x0,y0 in zip(xm,ym):
#        print x0,y0
        ax.scatter(x0,y0,
                    s=exponent**y0,
                    marker='*', zorder=3,
                    facecolors='yellow',color='black',
                    alpha=1.0)
        
#--------------------------------------
#----- key: event value: aftershock id
#--------------------------------------
d_trig = GetTrigList( nij_trig )

# #--------------------
# #----- subset (the same as trig. analysis)
# #--------------------
# swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
#                              'magnitude', 
#                              ( mc, sys.maxint ) ) 
# swarm_lohi.reset_index(inplace=True,drop=True)

# #--------------------
# #--- cartesian coords
# #--------------------
# swarm_lohi = GetCartesian( swarm_lohi )



# dd={'ROOT':407,
#     'exponent':4.5*3.2,
#     'dt_before':0.005,'dt_after':.1,
#     'head_width':.2,'head_length':.4,
#       'shrinkA':0}
# dd={'ROOT':325,
#     'exponent':4.5*3.2,
#     'dt_before':0.005,'dt_after':.1,
#     'head_width':.2,'head_length':.4,
#     'shrinkA':0} 
dd={'ROOT':1,#1505,
    'exponent':4.5*.8,
    'dt_before':0.01,'dt_after':0.25*12,
    'head_width':.2,'head_length':.4,
     'shrinkA':0}

#for ii in xrange(1):
ROOT = dd['ROOT']
exponent=dd['exponent']
dt_before = dd['dt_before']
dt_after=dd['dt_after']
head_width = dd['head_width']
head_length=dd['head_length']
shrinkA=dd['shrinkA']
swarm_lohi = GetCartesian( swarm_lohi )
dff = swarm_lohi.sort_values('magnitude',ascending=False)
roott = [i[1] for i in sl]
#for item in dff.iloc[:30].itertuples(): #--- plot clusters associated with large events
for item in dff.itertuples(): #--- plot clusters associated with large events
    if item.Index != ROOT:
        continue
    
    (xc,yc)=(item.longitude,item.latitude) #(-96.807159,35.985256) #(-98.739380,36.467762) #(-99.0,36.5)
    tc = item.date #pd.to_datetime("2016-09-03 12:02:44.520")
    dx = 1000

    fig = plt.figure(figsize=(24,8),frameon=False) #, dpi=150)
    ax = fig.add_subplot(111)
    
    #--- plot all events
    plt.ylim(mc,swarm_lohi['magnitude'].max())
    plt.scatter(swarm_lohi['date'],swarm_lohi['magnitude'],
                s=exponent**swarm_lohi['magnitude'],
            alpha=0.1)

    tt = tc #swarm_lohi['date'].loc[key_max]
    xx={0:xc,1:yc}

    PlotArrowsTime( swarm_lohi, d_trig,
                       ( tt+datetime.timedelta(days=-dt_before),tt+datetime.timedelta(days=+dt_after)), 
           (xx[0]-dx,xx[0]+dx),
           (xx[1]-dx,xx[1]+dx) )
#    ax.set_xlim(tt+datetime.timedelta(days=-dt_before),swarm_lohi.iloc[tree.pairs]['date'].max())
    ax.set_xlim(tt+datetime.timedelta(days=-dt_before),tt+datetime.timedelta(days=+dt_after))
    ax.set_ylim(mc-0.1,7.5)
    
    ax.tick_params(axis='y',labelsize=32,right=True)
    ax.tick_params(axis='x',rotation=20,labelsize=28,top=True)
    ax.xaxis.set_major_formatter(dates.DateFormatter('%d %b %Y \n %H:%M'))
    #--- frame
    DrawFrame(ax, (0.04,0.04),(0.22,0.04),0.1)
#    fig.savefig('%s/timeSeries.png'%DIR_OUTPT_figs,bbox_inches='tight',dpi=150)

AttributeErrorTraceback (most recent call last)
<ipython-input-98-1c04e11b3301> in <module>()
    151                        ( tt+datetime.timedelta(days=-dt_before),tt+datetime.timedelta(days=+dt_after)),
    152            (xx[0]-dx,xx[0]+dx),
--> 153            (xx[1]-dx,xx[1]+dx) )
    154 #    ax.set_xlim(tt+datetime.timedelta(days=-dt_before),swarm_lohi.iloc[tree.pairs]['date'].max())
    155     ax.set_xlim(tt+datetime.timedelta(days=-dt_before),tt+datetime.timedelta(days=+dt_after))

<ipython-input-98-1c04e11b3301> in PlotArrowsTime(df_complete, d_trig, (tmin, tmax), (xmin, xmax), (ymin, ymax))
     51 #            xw = x1 - x0 #--- t_child-t_parent
     52 #            yw = y1 - y0 #--- m_child - m_parent
---> 53             if daughter_id in tree.pairs:
     54                 tlist.append( t1 )
     55                 mlist.append( m1 )

AttributeError: class tree has no attribute 'pairs'

spatial map associated with the trig. cascade

In [99]:
#--- plot clusters


def Inside(t,(tmin,tmax)):
    if tmin<= t<tmax:
        return True
    return False    

def GetTrigList( nij_trig ):
    d_trig = {}
    for items in nij_trig.itertuples():
        triggerID = items.Parent_id
        d_trig.setdefault( triggerID, [] ).append( items.Event_id )
    return d_trig

    
def PlotArrowsSpace(df_complete,d_trig, (tmin,tmax), (xmin,xmax), (ymin,ymax) ):
    xlist=[]
    ylist=[]
    mlist=[]
    xm=[]
    ym=[]
    m_mother=[]
    for triggerID in d_trig: #--- mother events
        
        t0 = df_complete['date'].iloc[ triggerID ] #--- time of mother event
        ( x0, y0 ) = ( swarm_lohi['longitude'].iloc[triggerID], 
                       swarm_lohi['latitude'].iloc[triggerID] )
        m0 = df_complete['magnitude'].iloc[triggerID] #--- magnitude
        
        if not ( #Inside(t0,(tmin,tmax) ) and #--- within the given interval
                  Inside(x0,(xmin,xmax)) and 
                  Inside(y0,(ymin,ymax)) ):
             continue

        for daughter_id in d_trig[triggerID]: #--- children
            t1 = df_complete['date'].iloc[ daughter_id ] #--- time
            ( x1, y1 ) = ( swarm_lohi['longitude'].iloc[ daughter_id ], 
                           swarm_lohi['latitude'].iloc[ daughter_id ] )
            m1 = df_complete['magnitude'].iloc[ daughter_id ] #--- magnitude
                        
            if not ( #Inside(t1,(tmin,tmax) ) and #--- within the given interval
                      Inside(x1,(xmin,xmax)) and 
                      Inside(y1,(ymin,ymax)) ):
                 continue

            if daughter_id in tree.pairs:
                xlist.append( x1 )
                ylist.append( y1 )
                mlist.append( m1 )
                ax.annotate("", (x1,y1), xytext=(x0, y0), zorder=1,
                         textcoords='data', annotation_clip=False, 
#                         arrowprops=dict(width=.2,headlength=.4,shrink=0.5,color='b'))
                            # backgroundcolor="w",
                        arrowprops=dict(arrowstyle="-|>,head_width=%s,head_length=%s"%(head_width,head_length),
                                        shrinkA=shrinkA,
                                        color="b",linewidth="0.3")) 
 
        #--- plot mother
        if triggerID == tree.pairs[0]: # in roott:
            xm.append(x0)
            ym.append(y0)
            m_mother.append(m0)
        

    #--- plot circles
    df=pd.DataFrame({'x(km)':xlist,'y(km)':ylist,'m':mlist})
    ax.scatter(df['x(km)'], df['y(km)'],
                s=exponent**(df['m']),
                alpha=1, zorder=2,
                facecolors='red',color='black')
    
    for x0,y0,m0 in zip(xm,ym,m_mother):
        plt.scatter(x0,y0,
                    s=exponent**m0,
                    marker='*',zorder=3,
                    facecolors='yellow',color='black',
                    alpha=1.0)
    
# #--------------------------------------
# #----- key: event value: aftershock id
# #--------------------------------------
# d_trig = GetTrigList( nij_trig )

# #--------------------
# #----- subset (the same as trig. analysis)
# #--------------------
# swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
#                              'magnitude', 
#                              ( mc, sys.maxint ) ) 
# swarm_lohi.reset_index(inplace=True,drop=True)

# #--------------------
# #--- cartesian coords
# #--------------------
# swarm_lohi = GetCartesian( swarm_lohi )


# dd={'ROOT':407,
#     'exponent':4.0*2.7,
#     'dx':max(swarm_lohi.iloc[tree.pairs]['longitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['longitude'],
#     swarm_lohi.iloc[tree.pairs]['latitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['latitude']),
#     'head_width':.15,'head_length':.3,
#      'shrinkA':0}
# dd={'ROOT':325,
#     'exponent':4.0*2.7,
#      'dx':max(swarm_lohi.iloc[tree.pairs]['longitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['longitude'],
#      swarm_lohi.iloc[tree.pairs]['latitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['latitude']),
#     'head_width':.15,'head_length':.3,
#      'shrinkA':0}
dd={'ROOT':1505,
    'exponent':4.0*0.8,
     'dx':max(swarm_lohi.iloc[tree.pairs]['longitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['longitude'],
      swarm_lohi.iloc[tree.pairs]['latitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['latitude']),
    'head_width':.15,'head_length':.3,
     'shrinkA':0}

#for ii in xrange(1):
ROOT = dd['ROOT']
exponent=dd['exponent']
dx = dd['dx']
head_width = dd['head_width']
head_length=dd['head_length']
shrinkA=dd['shrinkA']
#--- plot arrows
#for ii in xrange(1):
swarm_lohi = GetCartesian( swarm_lohi )
dff = swarm_lohi.sort_values('magnitude',ascending=False)
roott = [i[1] for i in sl]
#for item in dff.iloc[:30].itertuples(): #--- plot clusters associated with large events
for item in dff.itertuples(): #--- plot clusters associated with large events
    if item.Index != ROOT:
        continue
    
    (xc,yc)=(item.longitude,item.latitude) #(-96.807159,35.985256) #(-98.739380,36.467762) #(-99.0,36.5)
    tc = item.date #pd.to_datetime("2016-09-03 12:02:44.520")
    dt = 1000   #--- setup

    
    fig = plt.figure(figsize=(8,8)) #,frameon=False) #, dpi=300)
    ax = fig.add_subplot(111)
    
    #--- plot all events
    ax.scatter(swarm_lohi['longitude'], swarm_lohi['latitude'], # 'x(km)'],swarm_lohi['y(km)'],
                s=exponent**swarm_lohi['magnitude'],
                alpha=0.1)

    tt = tc #swarm_lohi['date'].loc[key_max]
    xx={0:xc,1:yc}
    PlotArrowsSpace( swarm_lohi, d_trig,
    ( tt+datetime.timedelta(days=-dt),tt+datetime.timedelta(days=+dt)), 
           (xx[0]-dx,xx[0]+dx),
           (xx[1]-dx,xx[1]+dx) )

    
    ax.set_xlim(xx[0]-dx,xx[0]+dx)
    ax.set_ylim(xx[1]-dx,xx[1]+dx)
    
    
    #--- set tick labels
    xlist=np.linspace(xx[0]-dx,xx[0]+dx,4,endpoint=True)
    ax.set_xticks(xlist)
    xlist=[r'$%5.3f^{\circ}$'%i for i in xlist]
    #
    ylist=np.linspace(xx[1]-dx,xx[1]+dx,4,endpoint=True)
    ax.set_yticks(ylist)
    ylist=[r'$%5.3f^{\circ}$'%i for i in ylist]
#    ax.set_yticklabels(ylist)
    ax.tick_params(labelsize=24)
    ax.tick_params(axis='y',labelright=True,labelleft=False,right=True,left=True)
    ax.tick_params(axis='x',top=True)#,left=True)

    ax.set_xticklabels(xlist,{'fontsize':20})
    ax.set_yticklabels(ylist,{'fontsize':20})
    #--- frame
#    DrawFrame(ax, (0.16,0.16),(0.07,0.03),0.1)
    DrawFrame(ax, (0.1,0.16),(0.09,0.03),0.1)

#fig.savefig('%s/spatialMap.png'%DIR_OUTPT_figs,bbox_inches='tight',dpi=150)

AttributeErrorTraceback (most recent call last)
<ipython-input-99-22d6ad34348b> in <module>()
    111 dd={'ROOT':1505,
    112     'exponent':4.0*0.8,
--> 113      'dx':max(swarm_lohi.iloc[tree.pairs]['longitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['longitude'],
    114       swarm_lohi.iloc[tree.pairs]['latitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['latitude']),
    115     'head_width':.15,'head_length':.3,

AttributeError: class tree has no attribute 'pairs'

spatial map associated with the trig. cascade

In [100]:
#--- plot clusters


def Inside(t,(tmin,tmax)):
    if tmin<= t<tmax:
        return True
    return False    

def GetTrigList( nij_trig ):
    d_trig = {}
    for items in nij_trig.itertuples():
        triggerID = items.Parent_id
        d_trig.setdefault( triggerID, [] ).append( items.Event_id )
    return d_trig

    
def PlotArrowsSpace(df_complete,d_trig, (tmin,tmax), (xmin,xmax), (ymin,ymax) ):
    xlist=[]
    ylist=[]
    mlist=[]
    tlist=[]
    xm=[]
    ym=[]
    m_mother=[]
    tm=[]
    for triggerID in d_trig: #--- mother events
        
        t0 = df_complete['date'].iloc[ triggerID ] #--- time of mother event
        ( x0, y0 ) = ( swarm_lohi['longitude'].iloc[triggerID], 
                       swarm_lohi['latitude'].iloc[triggerID] )
        m0 = df_complete['magnitude'].iloc[triggerID] #--- magnitude
        
        if not ( #Inside(t0,(tmin,tmax) ) and #--- within the given interval
                  Inside(x0,(xmin,xmax)) and 
                  Inside(y0,(ymin,ymax)) ):
             continue

        for daughter_id in d_trig[triggerID]: #--- children
            t1 = df_complete['date'].iloc[ daughter_id ] #--- time
            ( x1, y1 ) = ( swarm_lohi['longitude'].iloc[ daughter_id ], 
                           swarm_lohi['latitude'].iloc[ daughter_id ] )
            m1 = df_complete['magnitude'].iloc[ daughter_id ] #--- magnitude
                        
            if not ( #Inside(t1,(tmin,tmax) ) and #--- within the given interval
                      Inside(x1,(xmin,xmax)) and 
                      Inside(y1,(ymin,ymax)) ):
                 continue

            if daughter_id in tree.pairs:
                xlist.append( x1 )
                ylist.append( y1 )
                mlist.append( m1 )
                tlist.append( t1 )
                ax.annotate("", (x1,y1), xytext=(x0, y0), zorder=1,
                         textcoords='data', annotation_clip=False, 
#                         arrowprops=dict(width=.2,headlength=.4,shrink=0.5,color='b'))
                            # backgroundcolor="w",
                        arrowprops=dict(arrowstyle="-|>,head_width=%s,head_length=%s"%(head_width,head_length),
                                        shrinkA=shrinkA,
                                        color="b",linewidth="0.3")) 
 
        #--- plot mother
        if triggerID == tree.pairs[0]: # in roott:
            xm.append(x0)
            ym.append(y0)
            m_mother.append(m0)
            tm.append(t0)
        

    #--- plot circles
    df=pd.DataFrame({'x(km)':xlist,'y(km)':ylist,'m':mlist,'t':tlist})
    ax.scatter(df['x(km)'], df['y(km)'],
                s=exponent**(df['m']),
                alpha=.5, zorder=2,# c = df['t']-tm[0],cmap='jet')#,
                facecolors='red',color='black')
    

    for x0,y0,m0 in zip(xm,ym,m_mother):
        plt.scatter(x0,y0,
                    s=exponent**m0,
                    marker='*',zorder=3,
                    facecolors='yellow',color='black',
                    alpha=1.0)
    
# #--------------------------------------
# #----- key: event value: aftershock id
# #--------------------------------------
# d_trig = GetTrigList( nij_trig )

# #--------------------
# #----- subset (the same as trig. analysis)
# #--------------------
# swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
#                              'magnitude', 
#                              ( mc, sys.maxint ) ) 
# swarm_lohi.reset_index(inplace=True,drop=True)

# #--------------------
# #--- cartesian coords
# #--------------------
# swarm_lohi = GetCartesian( swarm_lohi )


# dd={'ROOT':407,
#     'exponent':4.0*2.7,
#     'dx':max(swarm_lohi.iloc[tree.pairs]['longitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['longitude'],
#     swarm_lohi.iloc[tree.pairs]['latitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['latitude']),
#     'head_width':.15,'head_length':.3,
#      'shrinkA':0}
# dd={'ROOT':325,
#     'exponent':4.0*2.7,
#      'dx':max(swarm_lohi.iloc[tree.pairs]['longitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['longitude'],
#      swarm_lohi.iloc[tree.pairs]['latitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['latitude']),
#     'head_width':.15,'head_length':.3,
#      'shrinkA':0}

#--- plot arrows
#for ii in xrange(1):
swarm_lohi = GetCartesian( swarm_lohi )
dff = swarm_lohi.sort_values('magnitude',ascending=False)
roott = [i[1] for i in sl]

    
xxx=np.array(sl)

sr=[[j,i] for i,j in zip(root,S)]
sr.sort(reverse=True)
#sr[:10]

#for ROOT in root[cond]:
for ii in sr[:5]:
    ROOT = ii[1] 
#    if not ROOT == 1:
#        continue
    cls_id = xxx[xxx[:,1]==ROOT][0][2]
    tree.pairs = []
    stree[cls_id].plot(ax)
    tree.pairs = [ROOT]+tree.pairs
    
    dd={'ROOT':ROOT,
    'exponent':4.0*0.8,
#     'dx':max(swarm_lohi.iloc[tree.pairs]['longitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['longitude'],
#      swarm_lohi.iloc[tree.pairs]['latitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['latitude']),
    'dx':0.3*max(swarm_lohi['longitude'].max()-swarm_lohi['longitude'].min(),
    swarm_lohi['latitude'].max()-swarm_lohi['latitude'].min()),
    'head_width':.15,'head_length':.3,
     'shrinkA':0}
    exponent=dd['exponent']
    dx = dd['dx']
    head_width = dd['head_width']
    head_length=dd['head_length']
    shrinkA=dd['shrinkA']

#for ii in xrange(1):
#ROOT = dd['ROOT']
    #for item in dff.iloc[:30].itertuples(): #--- plot clusters associated with large events
    item = dff.loc[ROOT]
    if 1:
#    for item in dff.itertuples(): #--- plot clusters associated with large events
#        if item.Index != ROOT:
#            continue
        
        (xc,yc)=(item['longitude'],item['latitude']) 
#        (xc,yc)=(swarm_lohi['longitude'].mean(),swarm_lohi['latitude'].mean())
        tc = item['date'] #pd.to_datetime("2016-09-03 12:02:44.520")
        dt = 1000   #--- setup
    
        
        fig = plt.figure(figsize=(8,8)) #,frameon=False) #, dpi=300)
        ax = fig.add_subplot(111)
        
        #--- plot all events
        ax.scatter(swarm_lohi['longitude'], swarm_lohi['latitude'], # 'x(km)'],swarm_lohi['y(km)'],
                    s=exponent**swarm_lohi['magnitude'],
                    alpha=0.1)
    
        tt = tc #swarm_lohi['date'].loc[key_max]
        xx={0:xc,1:yc}
        PlotArrowsSpace( swarm_lohi, d_trig,
        ( tt+datetime.timedelta(days=-dt),tt+datetime.timedelta(days=+dt)), 
               (xx[0]-dx,xx[0]+dx),
               (xx[1]-dx,xx[1]+dx) )
    
        
        ax.set_xlim(xx[0]-dx,xx[0]+dx)
        ax.set_ylim(xx[1]-dx,xx[1]+dx)
        
        
        #--- set tick labels
        xlist=np.linspace(xx[0]-dx,xx[0]+dx,4,endpoint=True)
        ax.set_xticks(xlist)
        xlist=[r'$%5.3f^{\circ}$'%i for i in xlist]
        #
        ylist=np.linspace(xx[1]-dx,xx[1]+dx,4,endpoint=True)
        ax.set_yticks(ylist)
        ylist=[r'$%5.3f^{\circ}$'%i for i in ylist]
    #    ax.set_yticklabels(ylist)
        ax.tick_params(labelsize=24)
        ax.tick_params(axis='y',labelright=True,labelleft=False,right=True,left=True)
        ax.tick_params(axis='x',top=True)#,left=True)
    
        ax.set_xticklabels(xlist,{'fontsize':20})
        ax.set_yticklabels(ylist,{'fontsize':20})
        #--- frame
    #    DrawFrame(ax, (0.16,0.16),(0.07,0.03),0.1)
        DrawFrame(ax, (0.1,0.16),(0.09,0.03),0.1)

#        fig.savefig('%s/spatialMap.%s.png'%(DIR_OUTPT_figs,ROOT),bbox_inches='tight',dpi=150)

plot separate clusters

In [101]:
#--- plot clusters


def Inside(t,(tmin,tmax)):
    if tmin<= t<tmax:
        return True
    return False    

def GetTrigList( nij_trig ):
    d_trig = {}
    for items in nij_trig.itertuples():
        triggerID = items.Parent_id
        d_trig.setdefault( triggerID, [] ).append( items.Event_id )
    return d_trig

    
def PlotArrowsSpace(df_complete,d_trig, (tmin,tmax), (xmin,xmax), (ymin,ymax) ):
    xlist=[]
    ylist=[]
    mlist=[]
    tlist=[]
    xm=[]
    ym=[]
    m_mother=[]
    tm=[]
    for triggerID in d_trig: #--- mother events
        
        t0 = df_complete['date'].iloc[ triggerID ] #--- time of mother event
#        ( x0, y0 ) = ( swarm_lohi['longitude'].iloc[triggerID], 
#                       swarm_lohi['latitude'].iloc[triggerID] )
        ( X0, Y0 ) = ( swarm_lohi['x(km)'].iloc[triggerID], 
                       swarm_lohi['y(km)'].iloc[triggerID] )
        m0 = df_complete['magnitude'].iloc[triggerID] #--- magnitude
        
#        if not ( #Inside(t0,(tmin,tmax) ) and #--- within the given interval
#                  Inside(X0,(xmin,xmax)) and 
#                  Inside(Y0,(ymin,ymax)) ):
#             continue

        for daughter_id in d_trig[triggerID]: #--- children
            t1 = df_complete['date'].iloc[ daughter_id ] #--- time
#            ( x1, y1 ) = ( swarm_lohi['longitude'].iloc[ daughter_id ], 
#                           swarm_lohi['latitude'].iloc[ daughter_id ] )
            ( X1, Y1 ) = ( swarm_lohi['x(km)'].iloc[ daughter_id ], 
                           swarm_lohi['y(km)'].iloc[ daughter_id ] )
            m1 = df_complete['magnitude'].iloc[ daughter_id ] #--- magnitude
                        
#            if not ( #Inside(t1,(tmin,tmax) ) and #--- within the given interval
#                      Inside(X1,(xmin,xmax)) and 
#                      Inside(Y1,(ymin,ymax)) ):
#                 continue

            if daughter_id in d_trig[ROOT]:
                xlist.append( X1 )
                ylist.append( Y1 )
                mlist.append( m1 )
                tlist.append( t1 )
                ax.annotate("", (X1,Y1), xytext=(X0, Y0), zorder=1,
                         textcoords='data', annotation_clip=False, 
#                         arrowprops=dict(width=.2,headlength=.4,shrink=0.5,color='b'))
                            # backgroundcolor="w",
                        arrowprops=dict(arrowstyle="-|>,head_width=%s,head_length=%s"%(head_width,head_length),
                                        shrinkA=shrinkA,
                                        color="b",linewidth="0.3")) 
 
        #--- plot mother
        if triggerID == ROOT: #tree.pairs[0]: # in roott:
            xm.append(X0)
            ym.append(Y0)
            m_mother.append(m0)
            tm.append(t0)
        

    #--- plot circles
    df=pd.DataFrame({'x(km)':xlist,'y(km)':ylist,'m':mlist,'t':tlist})
    ax.scatter(df['x(km)'], df['y(km)'],
                s=exponent**(df['m']),
                alpha=.5, zorder=2,# c = df['t']-tm[0],cmap='jet')#,
                facecolors='red',color='black')

    for x0,y0,m0 in zip(xm,ym,m_mother):
        plt.scatter(x0,y0,
                    s=exponent**m0,
                    marker='*',zorder=3,
                    facecolors='yellow',color='black',
                    alpha=1.0)
    print m_mother[0]
    return (df['x(km)']-xm[0] + 1j*(df['y(km)']-ym[0])).abs(),df['t']-tm[0]
# #--------------------------------------
# #----- key: event value: aftershock id
# #--------------------------------------
d_trig = GetTrigList( nij_trig )

# #--------------------
# #----- subset (the same as trig. analysis)
# #--------------------
# swarm_lohi = DataFrameSubSet( swarm, #--- complete catalog
#                              'magnitude', 
#                              ( mc, sys.maxint ) ) 
# swarm_lohi.reset_index(inplace=True,drop=True)

#--------------------
#--- cartesian coords
#--------------------
swarm_lohi = GetCartesian( swarm_lohi )


# dd={'ROOT':407,
#     'exponent':4.0*2.7,
#     'dx':max(swarm_lohi.iloc[tree.pairs]['longitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['longitude'],
#     swarm_lohi.iloc[tree.pairs]['latitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['latitude']),
#     'head_width':.15,'head_length':.3,
#      'shrinkA':0}
# dd={'ROOT':325,
#     'exponent':4.0*2.7,
#      'dx':max(swarm_lohi.iloc[tree.pairs]['longitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['longitude'],
#      swarm_lohi.iloc[tree.pairs]['latitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['latitude']),
#     'head_width':.15,'head_length':.3,
#      'shrinkA':0}

#--- plot arrows
#for ii in xrange(1):
swarm_lohi = GetCartesian( swarm_lohi )
#dff = swarm_lohi.sort_values('magnitude',ascending=False)
#roott = [i[1] for i in sl]

    
#xxx=np.array(sl)

#sr=[[j,i] for i,j in zip(root,S)]
#sr.sort(reverse=True)
#sr[:10]

#for ROOT in root[cond]:
if 1: #for ii in sr[:20]:
#     ROOT = ii[1] 
#     if not ROOT == 1:
#         continue
#     cls_id = xxx[xxx[:,1]==ROOT][0][2]
#     tree.pairs = []
#     stree[cls_id].plot(ax)
#     tree.pairs = [ROOT]+tree.pairs
    ROOT = 5 #1509 #5
    dd={'ROOT':ROOT,
    'exponent':4.0*0.8,
#     'dx':max(swarm_lohi.iloc[tree.pairs]['longitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['longitude'],
#      swarm_lohi.iloc[tree.pairs]['latitude'].max()-swarm_lohi.iloc[tree.pairs[0]]['latitude']),
    'dx':0.2*max(swarm_lohi['x(km)'].max()-swarm_lohi['x(km)'].min(),
    swarm_lohi['y(km)'].max()-swarm_lohi['y(km)'].min()),
    'head_width':.15,'head_length':.3,
     'shrinkA':0}
    exponent=dd['exponent']
    dx = dd['dx']
    head_width = dd['head_width']
    head_length=dd['head_length']
    shrinkA=dd['shrinkA']

#for ii in xrange(1):
#ROOT = dd['ROOT']
    #for item in dff.iloc[:30].itertuples(): #--- plot clusters associated with large events
    item = swarm_lohi.loc[ROOT]
    if 1:
#    for item in dff.itertuples(): #--- plot clusters associated with large events
#        if item.Index != ROOT:
#            continue
        
        (xc,yc)=(item['x(km)'],item['y(km)']) 
#        (xc,yc)=(swarm_lohi['longitude'].mean(),swarm_lohi['latitude'].mean())
        tc = item['date'] #pd.to_datetime("2016-09-03 12:02:44.520")
        dt = 1000   #--- setup
    
        
        fig = plt.figure(figsize=(8,8)) #,frameon=False) #, dpi=300)
        ax = fig.add_subplot(111)
        
        #--- plot all events
        ax.scatter(swarm_lohi['x(km)'], swarm_lohi['y(km)'], # 'x(km)'],swarm_lohi['y(km)'],
                    s=exponent**swarm_lohi['magnitude'],
                    alpha=0.1)
    
        tt = tc #swarm_lohi['date'].loc[key_max]
        xx={0:xc,1:yc}
        junk,junk_t=PlotArrowsSpace( swarm_lohi, d_trig,
        ( tt+datetime.timedelta(days=-dt),tt+datetime.timedelta(days=+dt)), 
               (xx[0]-dx,xx[0]+dx),
               (xx[1]-dx,xx[1]+dx) )
    
        
        ax.set_xlim(xx[0]-dx,xx[0]+dx)
        ax.set_ylim(xx[1]-dx,xx[1]+dx)
        
        
        #--- set tick labels
        xlist=np.linspace(xx[0]-dx,xx[0]+dx,4,endpoint=True)
        ax.set_xticks(xlist)
        xlist=[r'$%s$'%(int(i)) for i in xlist]
        #
        ylist=np.linspace(xx[1]-dx,xx[1]+dx,4,endpoint=True)
        ax.set_yticks(ylist)
        ylist=[r'$%s$'%(int(i)) for i in ylist]
    #    ax.set_yticklabels(ylist)
        ax.tick_params(labelsize=24)
        ax.tick_params(axis='y',labelright=True,labelleft=False,right=True,left=True)
        ax.tick_params(axis='x',top=True)#,left=True)
    
        ax.set_xticklabels(xlist,{'fontsize':20})
        ax.set_yticklabels(ylist,{'fontsize':20})
        #--- frame
    #    DrawFrame(ax, (0.16,0.16),(0.07,0.03),0.1)
        DrawFrame(ax, (0.1,0.16),(0.09,0.03),0.1)

#fig.savefig('%s/spatialMap.png'%DIR_OUTPT_figs,bbox_inches='tight',dpi=150)
3.73
In [102]:
ROOT = 1505 #1

fig = plt.figure(figsize=(4,4))
ax = fig.add_subplot(111)
ax.set_xlim(100,1e6)
ax.set_ylim(1e-1,1e3)
ax.set_xscale('log')
ax.set_yscale('log')
    
for keys in stree:
    tree_obj = stree[ keys ]
    if tree_obj.data != ROOT:
        continue
    tree.rtm_dic = {} #--- initialize distance-time list
    tree_obj.get_coords() #--- update
    
    t_root = tree_obj.coord[ 0 ] #--- relative to the root
    r_root = np.array(tree_obj.coord[ 1 ][:2] )
    delta_t = []
    delta_r = []
    mmm=[]
    for key in tree.rtm_dic:
        t_afs = tree.rtm_dic[key][0]
        r_afs = np.array(tree.rtm_dic[key][1][:2])
#         if key == 5:
#             print tree.rtm_dic[key][0]
#             print tree.rtm_dic[key][1][:2]
#             print tree.rtm_dic[key][2]
        delta_t += [ ( t_afs - t_root ).total_seconds() ]
        delta_r += [ ( ( ( r_afs - r_root ) ** 2 ).sum() ) ** 0.5 ]
        mmm += [tree.rtm_dic[key][ 2 ]]

ax.scatter(delta_t,delta_r,s=3**np.array(mmm),alpha=0.1)
        

NameErrorTraceback (most recent call last)
<ipython-input-102-6eb46335e5fd> in <module>()
     31         mmm += [tree.rtm_dic[key][ 2 ]]
     32 
---> 33 ax.scatter(delta_t,delta_r,s=3**np.array(mmm),alpha=0.1)
     34 

NameError: name 'delta_t' is not defined
In [103]:
#print tree.rtm_dic

# swarm_lohi.sort_values(by='date',inplace=True)            
# coord_length=swarm_lohi['longitude'].apply(lambda x:[x])+swarm_lohi['latitude'].apply(lambda x:[x])
# Magnitude = np.exp(swarm_lohi['magnitude'])/20000

KEYS=tree.rtm_dic.keys()
KEYS.sort()

coordd={}
Magnitudee={}
for i in KEYS:
    coordd[i]=tree.rtm_dic[i][1][:2]
    Magnitudee[i]=np.exp(tree.rtm_dic[i][2])/1000
makeDATA( open( '%s/map2.xyz'%DIR_OUTPT, 'w' ), None ).Map( coordd, 'ITIME=%s'%0, Magnitudee) #--- instantanuous
EVENT_ID = Magnitudee.keys()
EVENT_ID = map(int,EVENT_ID)
EVENT_ID.sort()
cords={}
mag={}
t0 = None
os.system('rm %s/temporalMap2.xyz'%DIR_OUTPT)
for j_event in EVENT_ID:
    t = swarm_lohi['date'].iloc[ j_event ]
    if not t0:
        t0 = t
    assert t >= t0, 't0=%s,t=%s'%( t0, t )
    #--- temporal map 
    cords[ j_event ] = coordd[ j_event ]
    mag[ j_event ] = Magnitudee[ j_event ]
    itime=swarm_lohi[ 'date' ].iloc[ j_event ]
    tstr='%s-%s'%(itime.strftime("%x"), itime.strftime("%X"))
    makeDATA( open( '%s/temporalMap2.xyz'%DIR_OUTPT, 'a' ), None ).Map( cords, 'ITIME=%s'%itime, mag ) #--- instantanuous
    t0 = t

NameErrorTraceback (most recent call last)
<ipython-input-103-0066fe586382> in <module>()
     13     coordd[i]=tree.rtm_dic[i][1][:2]
     14     Magnitudee[i]=np.exp(tree.rtm_dic[i][2])/1000
---> 15 makeDATA( open( '%s/map2.xyz'%DIR_OUTPT, 'w' ), None ).Map( coordd, 'ITIME=%s'%0, Magnitudee) #--- instantanuous
     16 EVENT_ID = Magnitudee.keys()
     17 EVENT_ID = map(int,EVENT_ID)

NameError: name 'makeDATA' is not defined
In [104]:
swarm_loh = swarm_lohi.iloc[6:1510]
r0=np.array(swarm_lohi.iloc[5][['x(km)','y(km)']])
t0 = swarm_lohi.iloc[5][['date']]
m0 = swarm_lohi.iloc[5][['magnitude']]
dt=swarm_loh['date'].apply(lambda x:(x-t0))
dr=swarm_loh['x(km)'].apply(lambda x:x-r0[0])+swarm_loh['y(km)'].apply(lambda x:1j*(x-r0[1]))
dr=dr.abs()
dt_sec = dt['date'].apply(lambda x:x.total_seconds()/3600)
print m0
magnitude    3.73
Name: 5, dtype: object
In [105]:
fig=plt.figure(figsize=(4,4))
ax=fig.add_subplot(111)
ax.set_xlim(1e-1,1e2)
ax.set_ylim(1e-1,1e2)
ax.set_xscale('log')
ax.set_yscale('log')
ax.scatter(dt_sec,dr,s=3.0**(swarm_loh['magnitude']),alpha=0.2)
Out[105]:
<matplotlib.collections.PathCollection at 0x7fcc509fa750>
In [106]:
#--- r vs t (including indirect aftershocks)

dd = {}    
for cls_id in stree:
    index_m = np.array(m).searchsorted(stree[cls_id].coord[2]) - 1 #--- group clusters based on magnitude range
    dd.setdefault(index_m,[]).append(cls_id)

colors = ['black','red','green','blue','cyan','brown']

for group_m, col in zip(dd, colors ): #--- loop over magnitude ranges
    #--- plot
    fig = plt.figure(figsize=(4,4))
    ax = fig.add_subplot(111)
    ax.set_xlim(1,1e5)
    ax.set_ylim(1e-3,1e2)
    ax.set_xscale('log')
    ax.set_yscale('log')


    cls_ids = dd[ group_m ] #--- cluster ids for a given range
    delta_t = []
    delta_r = []
    for cls_id in cls_ids: #--- loop over each cluster
        tree.rtm_dic = {} #--- initialize distance-time list
        stree[cls_id].get_coords() #--- update
        t_root = stree[cls_id].coord[ 0 ] #--- relative to the root
        r_root = np.array( stree[cls_id].coord[ 1 ] )
        for key in tree.rtm_dic:
            t_afs = tree.rtm_dic[key][0]
            r_afs = np.array(tree.rtm_dic[key][1])
            delta_t += [ ( t_afs - t_root ).total_seconds() ]
            delta_r += [ ( ( ( r_afs - r_root ) ** 2 ).sum() ) ** 0.5 ]
        
    
    ax.scatter(delta_t,delta_r,color=col,alpha=0.05)

plt.show()
In [107]:
t0
Out[107]:
date    2019-07-04 17:38:21.450000
Name: 5, dtype: object
In [108]:
#--- r vs t (including indirect aftershocks)

dd = {}    
for cls_id in stree:
    index_m = np.array(m).searchsorted(stree[cls_id].coord[2]) - 1 #--- group clusters based on magnitude range
    dd.setdefault(index_m,[]).append(cls_id)

colors = ['black','red','green','blue','cyan','brown']

for group_m, col in zip(dd, colors ): #--- loop over magnitude ranges
    #--- plot
    fig = plt.figure(figsize=(4,4))
    ax = fig.add_subplot(111)
    ax.set_xlim(1,1e5)
    ax.set_ylim(1e-3,1e2)
    ax.set_xscale('log')
    ax.set_yscale('log')


    cls_ids = dd[ group_m ] #--- cluster ids for a given range
    delta_t = []
    delta_r = []
    for cls_id in cls_ids: #--- loop over each cluster
        tree.rtm_dic = {} #--- initialize distance-time list
        stree[cls_id].get_coords() #--- update
        t_root = stree[cls_id].coord[ 0 ] #--- relative to the root
        r_root = np.array( stree[cls_id].coord[ 1 ] )
        for key in tree.rtm_dic:
            t_afs = tree.rtm_dic[key][0]
            r_afs = np.array(tree.rtm_dic[key][1])
            delta_t += [ ( t_afs - t_root ).total_seconds() ]
            delta_r += [ ( ( ( r_afs - r_root ) ** 2 ).sum() ) ** 0.5 ]
        
    
    ax.scatter(delta_t,delta_r,color=col,alpha=0.05)

plt.show()
In [ ]: